All Files (28.02% covered at 0.94 hits/line)
419 files in total.
27665 relevant lines.
7751 lines covered and
19914 lines missed
-
# TODO: figure out how to use routing to select/define specific templates
-
# and action_set's to be used for a given route
-
1
r8_require('../../utils/internal/routes/routes')
-
-
1
R8::ReactorRoute.draw do
  # Maps "<model>/<action>" request paths to "<controller>#<action>" handlers.
  #
  # NOTE(cleanup): the following routes were registered twice with identical
  # targets; the duplicates were removed (behavior unchanged):
  #   assembly/get_action_results, component_module/pull_from_remote,
  #   component_module/list, test_module/pull_from_remote, test_module/list,
  #   node_module/pull_from_remote, node_module/list,
  #   service_module/list_component_modules, service_module/pull_from_remote

  # USER
  post 'user/process_login' => 'user#process_login'
  get 'user/process_logout' => 'user#process_logout'

  # MESSAGES
  get 'messages/retrieve' => 'messages#retrieve'

  # INTEGRATION
  post 'integration/spin_tenant' => 'integration#spin_tenant'

  # ACCOUNT
  post 'account/set_password' => 'account#set_password'
  post 'account/list_ssh_keys' => 'account#list_ssh_keys'
  post 'account/add_user_direct_access' => 'account#add_user_direct_access'
  post 'account/remove_user_direct_access' => 'account#remove_user_direct_access'
  post 'account/set_default_namespace' => 'account#set_default_namespace'
  post 'account/set_catalog_credentials' => 'account#set_catalog_credentials'
  post 'account/check_catalog_credentials' => 'account#check_catalog_credentials'

  # ASSEMBLY
  post 'assembly/promote_to_template' => 'assembly#promote_to_template'
  post 'assembly/get_action_results' => 'assembly#get_action_results'
  post 'assembly/find_violations' => 'assembly#find_violations'
  post 'assembly/create_task' => 'assembly#create_task'
  # NOTE(review): double underscore matches the controller action name; preserved as-is
  post 'assembly/add__service_add_on' => 'assembly#add__service_add_on'
  post 'assembly/create_smoketests_task' => 'assembly#create_smoketests_task'
  post 'assembly/list_attribute_mappings' => 'assembly#list_attribute_mappings'
  post 'assembly/add_ad_hoc_attribute_links' => 'assembly#add_ad_hoc_attribute_links'
  post 'assembly/delete_service_link' => 'assembly#delete_service_link'
  post 'assembly/add_service_link' => 'assembly#add_service_link'
  post 'assembly/list_service_links' => 'assembly#list_service_links'
  post 'assembly/list_remote' => 'assembly#list_remote'
  post 'assembly/list_connections' => 'assembly#list_connections'
  post 'assembly/list_smoketests' => 'assembly#list_smoketests'
  post 'assembly/list_with_workspace' => 'assembly#list_with_workspace'
  post 'assembly/info' => 'assembly#info'
  post 'assembly/rename' => 'assembly#rename'
  post 'assembly/delete' => 'assembly#delete'
  post 'assembly/destroy_and_reset_nodes' => 'assembly#destroy_and_reset_nodes'
  post 'assembly/purge' => 'assembly#purge' # workspace command
  post 'assembly/set_target' => 'assembly#set_target' # workspace command
  post 'assembly/set_attributes' => 'assembly#set_attributes'
  post 'assembly/apply_attribute_settings' => 'assembly#apply_attribute_settings'
  post 'assembly/get_attributes' => 'assembly#get_attributes'
  post 'assembly/add_assembly_template' => 'assembly#add_assembly_template'
  post 'assembly/add_node' => 'assembly#add_node'
  post 'assembly/add_component' => 'assembly#add_component'
  post 'assembly/initiate_get_log' => 'assembly#initiate_get_log'
  post 'assembly/initiate_grep' => 'assembly#initiate_grep'
  post 'assembly/initiate_get_ps' => 'assembly#initiate_get_ps'
  post 'assembly/initiate_execute_tests' => 'assembly#initiate_execute_tests'
  post 'assembly/initiate_action_agent' => 'assembly#initiate_action_agent'
  post 'assembly/list_component_module_diffs' => 'assembly#list_component_module_diffs'
  post 'assembly/print_includes' => 'assembly#print_includes'
  post 'assembly/task_action_detail' => 'assembly#task_action_detail'
  post 'assembly/add_node_group' => 'assembly#add_node_group'
  post 'assembly/get_component_module_info' => 'assembly#get_component_module_info'

  post 'assembly/start' => 'assembly#start'
  post 'assembly/stop' => 'assembly#stop'
  post 'assembly/list' => 'assembly#list'
  post 'assembly/workspace_object' => 'assembly#workspace_object'
  post 'assembly/info_about' => 'assembly#info_about'
  post 'assembly/info_about_task' => 'assembly#info_about_task'
  post 'assembly/stage' => 'assembly#stage'
  post 'assembly/deploy' => 'assembly#deploy'
  post 'assembly/task_status' => 'assembly#task_status'
  post 'assembly/remove_from_system' => 'assembly#remove_from_system'
  post 'assembly/initiate_get_netstats' => 'assembly#initiate_get_netstats'
  post 'assembly/delete_node' => 'assembly#delete_node'
  post 'assembly/delete_component' => 'assembly#delete_component'
  post 'assembly/prepare_for_edit_module' => 'assembly#prepare_for_edit_module'
  post 'assembly/create_component_dependency' => 'assembly#create_component_dependency'
  post 'assembly/promote_module_updates' => 'assembly#promote_module_updates'
  post 'assembly/clear_tasks' => 'assembly#clear_tasks'
  post 'assembly/cancel_task' => 'assembly#cancel_task'
  post 'assembly/initiate_ssh_pub_access' => 'assembly#initiate_ssh_pub_access'
  post 'assembly/list_ssh_access' => 'assembly#list_ssh_access'
  post 'assembly/list_settings' => 'assembly#list_settings'
  post 'assembly/get_component_modules' => 'assembly#get_component_modules'
  post 'assembly/delete_node_group' => 'assembly#delete_node_group'
  post 'assembly/get_node_groups' => 'assembly#get_node_groups'
  post 'assembly/get_nodes_without_node_groups' => 'assembly#get_nodes_without_node_groups'

  # ATTRIBUTE
  post 'attribute/set' => 'attribute#set'

  # COMPONENT
  post 'component/info' => 'component#info'
  post 'component/list' => 'component#list'
  post 'component/stage' => 'component#stage'

  # COMPONENT_MODULE
  # NOTE(review): intentionally routed to the account controller (shared handler)
  post 'component_module/add_user_direct_access' => 'account#add_user_direct_access'
  post 'component_module/info_about' => 'component_module#info_about'
  post 'component_module/pull_from_remote' => 'component_module#pull_from_remote'
  post 'component_module/update_model_from_clone' => 'component_module#update_model_from_clone'
  post 'component_module/delete' => 'component_module#delete'
  post 'component_module/delete_version' => 'component_module#delete_version'
  post 'component_module/test_generate_dsl' => 'component_module#test_generate_dsl'
  post 'component_module/create_new_dsl_version' => 'component_module#create_new_dsl_version'
  post 'component_module/info' => 'component_module#info'
  post 'component_module/list' => 'component_module#list'
  post 'component_module/remote_chmod' => 'component_module#remote_chmod'
  post 'component_module/remote_chown' => 'component_module#remote_chown'
  post 'component_module/confirm_make_public' => 'component_module#confirm_make_public'
  post 'component_module/remote_collaboration' => 'component_module#remote_collaboration'
  post 'component_module/list_remote_collaboration' => 'component_module#list_remote_collaboration'
  post 'component_module/resolve_pull_from_remote' => 'component_module#resolve_pull_from_remote'
  post 'component_module/list_remote' => 'component_module#list_remote'
  post 'component_module/versions' => 'component_module#versions'
  post 'component_module/create' => 'component_module#create'
  post 'component_module/import' => 'component_module#import'
  post 'component_module/import_version' => 'component_module#import_version'
  post 'component_module/delete_remote' => 'component_module#delete_remote'
  post 'component_module/export' => 'component_module#export'
  post 'component_module/create_new_version' => 'component_module#create_new_version'
  post 'component_module/get_remote_module_info' => 'component_module#get_remote_module_info'
  post 'component_module/get_workspace_branch_info' => 'component_module#get_workspace_branch_info'
  post 'component_module/update_from_initial_create' => 'component_module#update_from_initial_create'
  post 'component_module/install_puppet_forge_modules' => 'component_module#install_puppet_forge_modules'
  post 'component_module/list_remote_diffs' => 'component_module#list_remote_diffs'
  # WORK WITH GIT REMOTES
  post 'component_module/info_git_remote' => 'component_module#info_git_remote'
  post 'component_module/add_git_remote' => 'component_module#add_git_remote'
  post 'component_module/remove_git_remote' => 'component_module#remove_git_remote'

  # TEST_MODULE
  post 'test_module/add_user_direct_access' => 'account#add_user_direct_access'
  post 'test_module/info_about' => 'test_module#info_about'
  post 'test_module/pull_from_remote' => 'test_module#pull_from_remote'
  post 'test_module/update_model_from_clone' => 'test_module#update_model_from_clone'
  post 'test_module/delete' => 'test_module#delete'
  post 'test_module/delete_version' => 'test_module#delete_version'
  post 'test_module/test_generate_dsl' => 'test_module#test_generate_dsl'
  post 'test_module/create_new_dsl_version' => 'test_module#create_new_dsl_version'
  post 'test_module/info' => 'test_module#info'
  post 'test_module/list' => 'test_module#list'
  post 'test_module/remote_chmod' => 'test_module#remote_chmod'
  post 'test_module/remote_chown' => 'test_module#remote_chown'
  post 'test_module/confirm_make_public' => 'test_module#confirm_make_public'
  post 'test_module/remote_collaboration' => 'test_module#remote_collaboration'
  post 'test_module/list_remote_collaboration' => 'test_module#list_remote_collaboration'
  post 'test_module/resolve_pull_from_remote' => 'test_module#resolve_pull_from_remote'
  post 'test_module/list_remote' => 'test_module#list_remote'
  post 'test_module/versions' => 'test_module#versions'
  post 'test_module/create' => 'test_module#create'
  post 'test_module/import' => 'test_module#import'
  post 'test_module/import_version' => 'test_module#import_version'
  post 'test_module/delete_remote' => 'test_module#delete_remote'
  post 'test_module/export' => 'test_module#export'
  post 'test_module/create_new_version' => 'test_module#create_new_version'
  post 'test_module/get_remote_module_info' => 'test_module#get_remote_module_info'
  post 'test_module/get_workspace_branch_info' => 'test_module#get_workspace_branch_info'
  post 'test_module/update_from_initial_create' => 'test_module#update_from_initial_create'
  post 'test_module/list_remote_diffs' => 'test_module#list_remote_diffs'
  # WORK WITH GIT REMOTES
  post 'test_module/info_git_remote' => 'test_module#info_git_remote'
  post 'test_module/add_git_remote' => 'test_module#add_git_remote'
  post 'test_module/remove_git_remote' => 'test_module#remove_git_remote'

  # NODE_MODULE
  post 'node_module/add_user_direct_access' => 'account#add_user_direct_access'
  post 'node_module/info_about' => 'node_module#info_about'
  post 'node_module/pull_from_remote' => 'node_module#pull_from_remote'
  post 'node_module/update_model_from_clone' => 'node_module#update_model_from_clone'
  post 'node_module/delete' => 'node_module#delete'
  post 'node_module/delete_version' => 'node_module#delete_version'
  post 'node_module/test_generate_dsl' => 'node_module#test_generate_dsl'
  post 'node_module/create_new_dsl_version' => 'node_module#create_new_dsl_version'
  post 'node_module/info' => 'node_module#info'
  post 'node_module/list' => 'node_module#list'
  post 'node_module/remote_chmod' => 'node_module#remote_chmod'
  post 'node_module/remote_chown' => 'node_module#remote_chown'
  post 'node_module/confirm_make_public' => 'node_module#confirm_make_public'
  post 'node_module/remote_collaboration' => 'node_module#remote_collaboration'
  post 'node_module/list_remote_collaboration' => 'node_module#list_remote_collaboration'
  post 'node_module/list_remote' => 'node_module#list_remote'
  post 'node_module/versions' => 'node_module#versions'
  post 'node_module/create' => 'node_module#create'
  post 'node_module/import' => 'node_module#import'
  post 'node_module/import_version' => 'node_module#import_version'
  post 'node_module/delete_remote' => 'node_module#delete_remote'
  post 'node_module/export' => 'node_module#export'
  post 'node_module/create_new_version' => 'node_module#create_new_version'
  post 'node_module/get_remote_module_info' => 'node_module#get_remote_module_info'
  post 'node_module/get_workspace_branch_info' => 'node_module#get_workspace_branch_info'
  post 'node_module/update_from_initial_create' => 'node_module#update_from_initial_create'

  # DEPENDENCY
  post 'dependency/add_component_dependency' => 'dependency#add_component_dependency'

  # LIBRARY
  post 'library/list' => 'library#list'
  post 'library/info_about' => 'library#info_about'

  # METADATA
  get 'metadata/get_metadata' => 'metadata#get_metadata'

  # MONITORING_ITEM
  post 'monitoring_item/check_idle' => 'monitoring_item#check_idle'

  # NODE TEMPLATE
  post 'node/list' => 'node#list'
  post 'node/image_upgrade' => 'node#image_upgrade'
  post 'node/add_node_template' => 'node#add_node_template'
  post 'node/delete_node_template' => 'node#delete_node_template'

  # NODE INSTANCE
  post 'node/start' => 'node#start'
  post 'node/stop' => 'node#stop'
  # these commands right now should only be called wrt to assembly context
=begin
  post 'node/find_violations' => 'node#find_violations'
  post 'node/get_attributes' => 'node#get_attributes'
  post 'node/set_attributes' => 'node#set_attributes'
  post 'node/add_component' => 'node#add_component'
  post 'node/delete_component' => 'node#delete_component'
  post 'node/create_task' => 'node#create_task'

  post 'node/info' => 'node#info'
  post 'node/info_about' => 'node#info_about'
  post 'node/destroy_and_delete' => 'node#destroy_and_delete'
  post 'node/get_op_status' => 'node#get_op_status'

  post 'node/task_status' => 'node#task_status'
  post 'node/stage' => 'node#stage'
  post 'node/initiate_get_netstats' => 'node#initiate_get_netstats'
  post 'node/get_action_results' => 'node#get_action_results'
  post 'node/initiate_get_ps' => 'node#initiate_get_ps'
  post 'node/initiate_execute_tests' => 'node#initiate_execute_tests'
=end

  # NODE_GROUP
=begin
  post 'node_group/list' => 'node_group#list'
  post 'node_group/get_attributes' => 'node_group#get_attributes '
  post 'node_group/set_attributes' => 'node_group#set_attributes'
  post 'node_group/task_status' => 'node_group#task_status'
  post 'node_group/create' => 'node_group#create'
  post 'node_group/delete' => 'node_group#delete'
  post 'node_group/info_about' => 'node_group#info_about'
  post 'node_group/get_members' => 'node_group#get_members'
  post 'node_group/add_component' => 'node_group#add_component'
  post 'node_group/delete_component' => 'node_group#delete_component'
  post 'node_group/create_task' => 'node_group#create_task'
  post 'node_group/set_default_template_node' => 'node_group#set_default_template_node'
  post 'node_group/clone_and_add_template_node' => 'node_group#clone_and_add_template_node'
=end

  # PROJECT
  post 'project/list' => 'project#list'

  # REPO
  post 'repo/list' => 'repo#list'
  post 'repo/delete' => 'repo#delete'
  post 'repo/synchronize_target_repo' => 'repo#synchronize_target_repo'

  # SERVICE_MODULE
  post 'service_module/add_user_direct_access' => 'account#add_user_direct_access'
  post 'service_module/list_component_modules' => 'service_module#list_component_modules'
  post 'service_module/update_model_from_clone' => 'service_module#update_model_from_clone'
  post 'service_module/import' => 'service_module#import'
  post 'service_module/create' => 'service_module#create'
  post 'service_module/pull_from_remote' => 'service_module#pull_from_remote'
  post 'service_module/remote_chmod' => 'service_module#remote_chmod'
  post 'service_module/remote_chown' => 'service_module#remote_chown'
  post 'service_module/confirm_make_public' => 'service_module#confirm_make_public'
  post 'service_module/remote_collaboration' => 'service_module#remote_collaboration'
  post 'service_module/list_remote_collaboration' => 'service_module#list_remote_collaboration'
  post 'service_module/resolve_pull_from_remote' => 'service_module#resolve_pull_from_remote'
  post 'service_module/list' => 'service_module#list'
  post 'service_module/list_remote' => 'service_module#list_remote'
  post 'service_module/versions' => 'service_module#versions'
  post 'service_module/list_assemblies' => 'service_module#list_assemblies'
  post 'service_module/list_instances' => 'service_module#list_instances'
  post 'service_module/import_version' => 'service_module#import_version'
  post 'service_module/export' => 'service_module#export'
  post 'service_module/create_new_version' => 'service_module#create_new_version'
  post 'service_module/set_component_module_version' => 'service_module#set_component_module_version'
  post 'service_module/delete' => 'service_module#delete'
  post 'service_module/delete_version' => 'service_module#delete_version'
  post 'service_module/delete_remote' => 'service_module#delete_remote'
  post 'service_module/delete_assembly_template' => 'service_module#delete_assembly_template'
  post 'service_module/get_remote_module_info' => 'service_module#get_remote_module_info'
  post 'service_module/get_workspace_branch_info' => 'service_module#get_workspace_branch_info'
  post 'service_module/info' => 'service_module#info'
  post 'service_module/list_remote_diffs' => 'service_module#list_remote_diffs'
  # WORK WITH GIT REMOTES
  post 'service_module/info_git_remote' => 'service_module#info_git_remote'
  post 'service_module/add_git_remote' => 'service_module#add_git_remote'
  post 'service_module/remove_git_remote' => 'service_module#remove_git_remote'

  # get 'service_module/workspace_branch_info/#{service_module_id.to_s}' => 'service_module#workspace_branch_info/#{service_module_id.to_s}'

  # STATE_CHANGE
  get 'state_change/list_pending_changes' => 'state_change#list_pending_changes'

  # TARGET
  post 'target/list' => 'target#list'
  post 'target/create' => 'target#create'
  post 'target/create_provider' => 'target#create_provider'
  post 'target/set_default' => 'target#set_default'
  post 'target/info_about' => 'target#info_about'
  post 'target/import_nodes' => 'target#import_nodes'
  post 'target/delete_and_destroy' => 'target#delete_and_destroy'
  post 'target/info' => 'target#info'
  post 'target/install_agents' => 'target#install_agents'
  post 'target/create_install_agents_task' => 'target#create_install_agents_task'
  post 'target/task_status' => 'target#task_status'
  post 'target/set_properties' => 'target#set_properties'

  # TASK
  post 'task/cancel_task' => 'task#cancel_task'
  post 'task/execute' => 'task#execute'
  post 'task/list' => 'task#list'
  post 'task/status' => 'task#status'
  post 'task/create_task_from_pending_changes' => 'task#create_task_from_pending_changes'

  # DEVELOPER
  post 'developer/inject_agent' => 'developer#inject_agent'

  # NAMESPACE
  post 'namespace/default_namespace_name' => 'namespace#default_namespace_name'
end
-
-
1
# Per-route template/layout configuration. Each entry keys R8::Routes by a
# "controller/action" string and supplies :layout (template), optional :alias,
# :params (names of positional params) and :action_set (sub-actions composed
# into panels when rendering a page).
R8::Routes[:login] = {
  :alias => 'user/login'
}

# Routes that correspond to (non-trivial action sets)
=begin
R8::Routes["component/display"] = {
  :layout => 'default',
  :alias => '',
  :params => [:id],
  :action_set =>
    [
      {
        :route => "component/display",
        :action_params => ["$id$"],
        :panel => "main_body"
      },
      {
        :route => "attribute/list_for_component_display",
        :action_params => [{:parent_id => "$id$"}],
        :panel => "main_body",
        # :assign_type => 'append | prepend | replace'
        :assign_type => :append
      },
      {
        :route => "monitoring_item/list_for_component_display",
        :action_params => [{:parent_id => "$id$"}],
        :panel => "main_body",
        :assign_type => :append
      }
    ]
}
=end

R8::Routes["node/display"] = {
  :layout => 'default',
  :alias => '',
  :params => [:id],
  :action_set =>
    [
      {
        :route => "node/display",
        :action_params => ["$id$"],
        :panel => "main_body"
      },
      {
        :route => "node_interface/list",
        :action_params => [{:parent_id => "$id$"}],
        :panel => "main_body",
        :assign_type => :append
      },
      {
        :route => "monitoring_item/node_display",
        :action_params => [{:parent_id => "$id$"}],
        :panel => "main_body",
        :assign_type => :append
      }
    ]
}

R8::Routes["state_change/list_pending"] = {
  :layout => 'default',
  :alias => '',
  :params => [],
  :action_set =>
    [
      {
        :route => "state_change/list",
        # :state_change_id => nil will only pick up top level state changes;
        # second condition just picks out pending changes
        :action_params => [{:state_change_id => nil}, {:status => "pending"}],
        :panel => "main_body"
      }
    ]
}

R8::Routes["state_change/display"] = {
  :layout => 'default',
  :alias => '',
  :params => [:id],
  :action_set =>
    [
      {
        :route => "state_change/display",
        :action_params => ["$id$"],
        :panel => "main_body"
      },
      {
        :route => "state_change/list",
        :action_params => [{:parent_id => "$id$"}],
        :panel => "main_body",
        :assign_type => :append
      }
    ]
}

R8::Routes["task/list"] = {
  :layout => 'default',
  :alias => '',
  :params => [],
  :action_set =>
    [
      {
        :route => "task/list",
        # :task_id will only pick up top level tasks
        :action_params => [{:task_id => nil}],
        :panel => "main_body"
      }
    ]
}

R8::Routes["task/display"] = {
  :layout => 'default',
  :alias => '',
  :params => [:id],
  :action_set =>
    [
      {
        :route => "task/display",
        :action_params => ["$id$"],
        :panel => "main_body"
      },
      {
        :route => "task/list",
        :action_params => [{:parent_id => "$id$"}],
        :panel => "main_body",
        :assign_type => :append
      }
    ]
}

R8::Routes["component/testjsonlayout"] = {
  :layout => 'testjson'
}

R8::Routes["workspace"] = {
  :layout => 'workspace'
}
R8::Routes["workspace/index"] = {
  # :layout => 'workspace'
  :layout => 'workspace2'
}
R8::Routes["workspace/loaddatacenter"] = {
  :layout => 'workspace'
}
R8::Routes["workspace/list_items"] = {
  :layout => 'workspace'
}
R8::Routes["workspace/list_items_new"] = {
  :layout => 'workspace'
}

R8::Routes["workspace/list_items_2"] = {
  :layout => 'workspace'
}

R8::Routes["user/login"] = {
  :layout => 'login'
}

R8::Routes["user/register"] = {
  :layout => 'login'
}

R8::Routes["datacenter/load_vspace"] = {
  :layout => 'workspace'
}

R8::Routes["component/details"] = {
  :layout => 'details2'
}
R8::Routes["component/details2"] = {
  :layout => 'details2'
}

# NOTE(cleanup): an earlier `R8::Routes["datacenter/list"] = {:layout => 'dashboard'}`
# entry was removed; it was dead code, unconditionally overwritten by the
# `:layout => 'inventory'` assignment below before the hash was frozen.
R8::Routes["component/list"] = {
  # :layout => 'inventory'
  :layout => 'library'
}
R8::Routes["library/index"] = {
  :layout => 'library'
}

R8::Routes["node/list"] = {
  :layout => 'inventory'
}

R8::Routes["datacenter/list"] = {
  :layout => 'inventory'
}

R8::Routes["inventory/index"] = {
  :layout => 'inventory'
}

R8::Routes["editor/index"] = {
  :layout => 'editor'
}

R8::Routes["ide/index"] = {
  # :layout => 'ide'
  :layout => 'workspace2'
}
R8::Routes["ide/test_tree"] = {
  :layout => 'ide'
}

R8::Routes["import/index"] = {
  :layout => 'import'
}
R8::Routes["import/load_wizard"] = {
  :layout => 'import'
}
R8::Routes["import/step_one"] = {
  :layout => 'import'
}
R8::Routes["import/step_two"] = {
  :layout => 'import'
}
R8::Routes["import/step_three"] = {
  :layout => 'import'
}

# Freeze so later code cannot mutate the route configuration.
R8::Routes.freeze
-
-
1
module DTK
  # REST endpoints for account management: password, SSH keys, direct repo
  # access, default namespace, and catalog credentials.
  class AccountController < AuthController

    # Allowed characters for a user-supplied public-key name.
    PUB_KEY_NAME_REGEX = /[a-zA-Z0-9_\-]*/

    # Sets a new password for the currently logged-in user.
    def rest__set_password()
      password = ret_non_null_request_params(:new_password)
      user = CurrentSession.new.get_user_object()

      rest_ok_response user.update_password(password)
    end

    # Lists client-type repo users whose username matches the given one.
    def rest__list_ssh_keys()
      username = ret_non_null_request_params(:username)
      model_handle = model_handle_with_private_group()
      rest_ok_response RepoUser.get_matching_repo_users(model_handle.createMH(:repo_user), {:type => 'client'}, username, ["username"])
    end

    # we use this method to add user access to modules / service / repo manager
    def rest__add_user_direct_access
      rsa_pub_key = ret_non_null_request_params(:rsa_pub_key)
      # username in this context is rsa pub key name
      username = ret_request_params(:username)

      # also a flag to see if there were any errors
      repoman_registration_error = nil

      # reject key names containing characters outside PUB_KEY_NAME_REGEX
      if username && !username.eql?(username.match(PUB_KEY_NAME_REGEX)[0])
        raise DTK::Error, "Invalid format of pub key name, characters allowed are: '#{PUB_KEY_NAME_REGEX.source.gsub('\\','')}'"
      end

      # we do this check in add user direct as well but for simplicity we will duplicate it here as well
      if RepoUser.find_by_pub_key(model_handle_with_private_group(), rsa_pub_key)
        raise ErrorUsage, RepoUser::SSH_KEY_EXISTS
      end

      begin
        # Add Repo Manager user
        response = Repo::Remote.new.add_client_access(rsa_pub_key, username)
      rescue DTK::Error => e
        # we conditionally ignore it and we fix it later when calling repomanager
        Log.warn("We were not able to add user to Repo Manager, reason: #{e.message}")

        # this is terrible practice but error/response classes are so tightly coupled to rest of the code
        # that I do not dare change them
        if e.message.include?("Name has already been taken")
          raise ErrorUsage, "Please choose a different name for your key, name has been taken"
        end

        repoman_registration_error = e.message
      end

      # Service call
      match_service, repo_user_service = ServiceModule.add_user_direct_access(model_handle_with_private_group(:service_module), rsa_pub_key, username)

      # Module call
      match_module, repo_user_module = ComponentModule.add_user_direct_access(model_handle_with_private_group(:component_module), rsa_pub_key, username)

      # match is boolean to see if there has been a match
      match = match_service && match_module
      matched_repo_user = repo_user_service || repo_user_module

      # set a flag in database if this user has been registered to repoman
      # (guard against nil: neither service nor module call may have returned a repo user)
      matched_repo_user.update(:repo_manager_direct_access => true) if matched_repo_user && repoman_registration_error.nil?

      # only if user exists already
      Log.info("User ('#{matched_repo_user[:username]}') exists with given PUB key, not able to create a user. ") if match

      rest_ok_response(
        :repo_manager_fingerprint => RepoManager.repo_server_ssh_rsa_fingerprint(),
        :repo_manager_dns => RepoManager.repo_server_dns(),
        :match => match,
        :new_username => matched_repo_user ? matched_repo_user[:username] : nil,
        :matched_username => match && matched_repo_user ? matched_repo_user[:username] : nil,
        :repoman_registration_error => repoman_registration_error
      )
    end

    # Revokes a user's direct repo access; accepts either a username or a
    # numeric user id.
    def rest__remove_user_direct_access()
      username = ret_non_null_request_params(:username)
      repoman_registration_error = nil

      # if id instead of username, resolve it to the username
      if username.to_s =~ /^[0-9]+$/
        model_handle = model_handle_with_private_group()
        user_mh = model_handle.createMH(:repo_user)
        user = User.get_user_by_id( user_mh, username)
        username = user[:username] if user
      end

      begin
        response = Repo::Remote.new.remove_client_access(username)
      rescue DTK::Error => e
        # we ignore it and we fix it later when calling repomanager
        Log.warn("We were not able to remove user from Repo Manager, reason: #{e.message}")
        repoman_registration_error = e.message
      end

      ServiceModule.remove_user_direct_access(model_handle_with_private_group(:service_module),username)
      ComponentModule.remove_user_direct_access(model_handle_with_private_group(:component_module),username)

      rest_ok_response(
        :repoman_registration_error => repoman_registration_error
      )
    end

    # Persists the given namespace as the current user's default and refreshes
    # the session's user object.
    def rest__set_default_namespace()
      namespace = ret_non_null_request_params(:namespace)

      user_object = CurrentSession.new.get_user_object()
      user_object.update(:default_namespace => namespace)
      CurrentSession.new.set_user_object(user_object)

      rest_ok_response
    end

    # Reports whether catalog credentials are set for the current session.
    def rest__check_catalog_credentials()
      rest_ok_response(
        # NOTE(review): misspelled method name comes from CurrentSession's API; do not "fix" here
        :catalog_credentials_set => CurrentSession.are_catalog_credentilas_set?
      )
    end

    # Stores catalog credentials on the user object and invalidates the cached
    # repo-manager session. With :validate, first checks them against the repo manager.
    def rest__set_catalog_credentials()
      username, password = ret_non_null_request_params(:username, :password)
      validate = ret_request_params(:validate)

      # if validate param is sent - validate if credentials exist on repo manager
      # used when creating new user on client and setting catalog credentials in initial step
      Repo::Remote.new.validate_catalog_credentials(username, password) if validate

      user_object = CurrentSession.new.get_user_object()
      user_object.update(:catalog_username => username, :catalog_password => password)
      session_obj = CurrentSession.new
      session_obj.set_user_object(user_object)
      # we invalidate the session for repoman
      session_obj.set_repoman_session_id(nil)

      rest_ok_response
    end
  end
end
-
1
require 'base64'
-
-
1
r8_require('../../utils/performance_service')
-
-
1
module DTK
-
1
class ActionsetController < Controller
-
1
# Entry point for action-set requests. `route` is the split request path:
# [model, action, *params]. Validates the route, authenticates the user
# (optionally reviving the session from the "dtk-user-info" cookie), then runs
# either the route's configured action set or a singleton action set.
def process(*route)
  # first two segments form the route key; the rest are positional params
  route_key = route[0..1].join("/")
  action_set_params = route[2..route.size-1]||[]

  route = R8::ReactorRoute.validate_route(request.request_method, route_key)

  # return 404 Resource Not Found if route is not valid
  # (Ramaze respond halts the action, so execution does not continue past here)
  respond("#{route_key}!", 404) unless route

  # validate_route may have rewritten the route; derive model/key from the result.
  # NOTE(cleanup): a dead `model_name = route[0].to_sym` assignment that was
  # unconditionally overwritten here was removed.
  model_name = route.first.to_sym
  route_key = route.join('/')

  begin
    ramaze_user = user_object()
  rescue ::Sequel::DatabaseDisconnectError, ::Sequel::DatabaseConnectionError => e
    respond(e, 403)
  end

  unless route.first == "user"
    unless logged_in?
      unless R8::Config[:session][:cookie][:disabled]
        if request.cookies["dtk-user-info"]
          # using cookie to take session information;
          # composed data consists of user_id, expire timestamp, and tenant id.
          # URL encoding turns '+' into ' ', so we correct that via gsub
          cookie_data = Base64.decode64(request.cookies["dtk-user-info"].gsub(' ','+'))
          composed_data = ::AESCrypt.decrypt(cookie_data, ENCRYPTION_SALT, ENCRYPTION_SALT)

          user_id, time_integer, c = composed_data.split('_')

          # make sure that cookie has not expired
          if (time_integer.to_i >= Time.now.to_i)
            # due to tight coupling between model_handle and user_object we will set
            # model handle manually
            begin
              ramaze_user = User.get_user_by_id( { :model_name => :user, :c => c }, user_id)
            rescue ::Sequel::DatabaseDisconnectError, ::Sequel::DatabaseConnectionError => e
              respond(e, 403)
            end

            # TODO: [Haris] This is workaround to make sure that user is logged in, due to Ramaze design
            # this is easiest way to do it. But does feel dirty.
            # TODO: [Haris] This does not work since user is not persisted, look into this after cookie bug is resolved
            user_login(ramaze_user.merge(:access_time => Time.now))

            # we set :last_ts as access time for later check
            session.store(:last_ts, Time.now.to_i)
          end
        end
      end
    end

    # NOTE(cleanup): local renamed from `session` to avoid shadowing Ramaze's
    # session helper used above
    current_session = CurrentSession.new
    current_session.set_user_object(ramaze_user)
    current_session.set_auth_filters(:c,:group_ids)

    login_first unless R8::Config[:development_test_user]
  end

  @json_response = true if ajax_request?

  # if a config is defined for route, use its action set; otherwise build a
  # singleton action set of length one and run it
  action_set_def = R8::Routes[route_key] || Hash.new
  @action_set_param_map = ret_action_set_param_map(action_set_def,action_set_params)

  @layout = (R8::Routes[route_key] ? R8::Routes[route_key][:layout] : nil) || R8::Config[:default_layout]

  if action_set_def[:action_set]
    run_action_set(action_set_def[:action_set],model_name)
  else
    action_set = compute_singleton_action_set(action_set_def,route_key,action_set_params)
    run_action_set(action_set)
  end

  Log.info("USER -- : '#{ramaze_user[:username]}'")
end
-
1
private
-
1
def compute_singleton_action_set(action_set_def,route_key,action_set_params)
-
10
action_params = action_set_params
-
10
query_string = ret_parsed_query_string_from_uri()
-
10
action_params << query_string unless query_string.empty?
-
10
action = {
-
:route => action_set_def[:route] || route_key,
-
:action_params => action_params
-
}
-
10
unless rest_request?
-
action.merge!(
-
:panel => action_set_def[:panel] || :main_body,
-
:assign_type => action_set_def[:assign_type] || :replace
-
)
-
end
-
10
[action]
-
end
-
-
# parent_model_name only set when top level action decomposed as opposed to when an action set of length one is created
-
1
def run_action_set(action_set,parent_model_name=nil)
-
10
PerformanceService.log("OPERATION=#{action_set.first[:route]}")
-
10
PerformanceService.log("REQUEST_PARAMS=#{request.params.to_json}")
-
10
if rest_request?
-
10
unless (action_set||[]).size == 1
-
raise Error.new("If rest response action set must just have one element")
-
end
-
10
PerformanceService.start("PERF_OPERATION_DUR")
-
10
run_rest_action(action_set.first,parent_model_name)
-
10
PerformanceService.end("PERF_OPERATION_DUR")
-
10
return
-
end
-
-
@ctrl_results = ControllerResultsWeb.new
-
-
# Execute each of the actions in the action_set and set the returned content
-
(action_set||[]).each do |action|
-
model,method = action[:route].split("/")
-
method ||= :index
-
action_namespace = "#{R8::Config[:application_name]}_#{model}_#{method}".to_sym
-
result = call_action(action,parent_model_name)
-
-
ctrl_result = Hash.new
-
-
if result and result.length > 0
-
# if a hash is returned, turn make result an array list of one
-
if result.kind_of?(Hash)
-
ctrl_result[:content] = [result]
-
else
-
ctrl_result = result
-
end
-
panel_content_track = {}
-
# for each piece of content set by controller result,make sure panel and assign type is set
-
ctrl_result[:content].each_with_index do |item,index|
-
# set the appropriate panel to render results to
-
panel_name = (ctrl_result[:content][index][:panel] || action[:panel] || :main_body).to_sym
-
panel_content_track[panel_name] ? panel_content_track[panel_name] +=1 : panel_content_track[panel_name] = 1
-
ctrl_result[:content][index][:panel] = panel_name
-
-
(panel_content_track[panel_name] == 1) ? dflt_assign_type = :replace : dflt_assign_type = :append
-
# set the appropriate render assignment type (append | prepend | replace)
-
ctrl_result[:content][index][:assign_type] = (ctrl_result[:content][index][:assign_type] || action[:assign_type] || dflt_assign_type).to_sym
-
-
# set js with base cache uri path
-
ctrl_result[:content][index][:src] = "#{R8::Config[:base_js_cache_uri]}/#{ctrl_result[:content][index][:src]}" if !ctrl_result[:content][index][:src].nil?
-
end
-
end
-
-
ctrl_result[:js_includes] = ret_js_includes()
-
ctrl_result[:css_includes] = ret_css_includes()
-
ctrl_result[:js_exe_list] = ret_js_exe_list()
-
-
@ctrl_results.add(action_namespace,ctrl_result)
-
end
-
end
-
-
1
def run_rest_action(action,parent_model_name=nil)
-
10
model, method = action[:route].split("/")
-
10
method ||= :index
-
10
result = nil
-
10
begin
-
10
result = call_action(action,parent_model_name)
-
rescue SessionTimeout => e
-
# TODO: see why dont have result = auth_forbidden_response(e.message)
-
Log.info "Session error: #{e.message}"
-
auth_forbidden_response(e.message)
-
rescue SessionError => e
-
# TODO: see why dont have result = auth_unauthorized_response(e.message)
-
auth_unauthorized_response(e.message)
-
# rescue ErrorUsage::Warning => e
-
# TODO: handke warnings sepcially; right now handling just like errors
-
rescue Exception => e
-
4
if e.kind_of?(ErrorUsage)
-
# TODO: respond_to? is probably not needed
-
1
unless e.respond_to?(:donot_log_error) and e.donot_log_error()
-
1
Log.error_pp([e,e.backtrace[0]])
-
end
-
else
-
3
Log.error_pp([e,e.backtrace[0..20]])
-
end
-
-
4
result = rest_notok_response(RestError.create(e).hash_form())
-
end
-
10
@ctrl_results = ControllerResultsRest.new(result)
-
end
-
-
1
def call_action(action,parent_model_name=nil)
-
10
model,method = action[:route].split("/")
-
10
controller_class = XYZ.const_get("#{model.capitalize}Controller")
-
10
method ||= :index
-
10
if rest_request?()
-
10
rest_variant = "rest__#{method}"
-
10
if controller_class.method_defined?(rest_variant)
-
9
method = rest_variant
-
end
-
end
-
10
model_name = model.to_sym
-
10
processed_params = process_action_params(action[:action_params])
-
10
action_set_params = ret_search_object(processed_params,model_name,parent_model_name)
-
10
uri_params = ret_uri_params(processed_params)
-
10
variables = {:action_set_params => action_set_params}
-
10
unless rest_request?()
-
variables.merge!(
-
:js_includes => @js_includes,
-
:css_includes => @css_includes,
-
:js_exe_list => @js_exe_list
-
)
-
end
-
-
10
a = Ramaze::Action.create(
-
:node => controller_class,
-
:method => method.to_sym,
-
:params => uri_params,
-
6
:engine => lambda{|action, value| value },
-
:variables => variables
-
)
-
-
10
return a.call
-
end
-
-
-
1
def ret_search_object(processed_params,model_name,parent_model_name=nil)
-
# TODO: assume everything is just equal
-
10
filter_params = processed_params.select{|p|p.kind_of?(Hash)}
-
10
return nil if filter_params.empty?
-
# for processing :parent_id
-
parent_id_field_name = ModelHandle.new(ret_session_context_id(),model_name,parent_model_name).parent_id_field_name?()
-
filter = [:and] + filter_params.map do |el|
-
raw_pair = [el.keys.first,el.values.first]
-
[:eq] + raw_pair.map{|x| x == :parent_id ? parent_id_field_name : x}
-
end
-
{"search" => {
-
"search_pattern" => {
-
:relation => model_name,
-
:filter => filter
-
}
-
}
-
}
-
end
-
-
1
def ret_uri_params(processed_params)
-
10
processed_params.select{|p|not p.kind_of?(Hash)}
-
end
-
-
# does substitution of free variables in raw_params
-
1
def process_action_params(raw_params)
-
# short circuit if no params that need substituting
-
10
return raw_params if @action_set_param_map.empty?
-
if raw_params.kind_of?(Array)
-
raw_params.map{|p|process_action_params(p)}
-
elsif raw_params.kind_of?(Hash)
-
ret = Hash.new
-
raw_params.each{|k,v|ret[k] = process_action_params(v)}
-
ret
-
elsif raw_params.kind_of?(String)
-
ret = raw_params.dup
-
@action_set_param_map.each{|k,v|ret.gsub!(Regexp.new("\\$#{k}\\$"),v.to_s)}
-
ret
-
else
-
raw_params
-
end
-
end
-
-
1
def ret_action_set_param_map(action_set_def,action_set_params)
-
10
ret = Hash.new
-
10
return ret if action_set_def.nil?
-
10
i = 0
-
10
(action_set_def[:params]||[]).each do |param_name|
-
if i < action_set_params.size
-
ret[param_name] = action_set_params[i]
-
else
-
ret[param_name] = nil
-
Log.info("action set param #{param_name} not specfied in action set call")
-
end
-
i = i+1
-
end
-
10
ret
-
end
-
-
-
# TODO: lets finally kill off the xyz and move route loading into some sort of initialize or route setup call
-
# enter the routes defined in config into Ramaze
-
-
1
Ramaze::Route["route_to_actionset"] = lambda{ |path, request|
-
10
if path =~ Regexp.new("^/xyz") and not path =~ Regexp.new("^/xyz/devtest")
-
path.gsub(Regexp.new("^/xyz"),"/xyz/actionset/process")
-
elsif path =~ Regexp.new("^/rest")
-
10
path.gsub(Regexp.new("^/rest"),"/xyz/actionset/process")
-
end
-
}
-
end
-
end
-
1
module DTK
-
1
class AssemblyController < AuthController
-
1
helper :assembly_helper
-
1
helper :task_helper
-
-
1
include Assembly::Instance::Action
-
-
#### create and delete actions ###
-
# TODO: rename to delete_and_destroy
-
1
def rest__delete()
-
assembly_id,subtype = ret_assembly_params_id_and_subtype()
-
if subtype == :template
-
# returning module_repo_info so client can update this in its local module
-
rest_ok_response Assembly::Template.delete_and_ret_module_repo_info(id_handle(assembly_id))
-
else #subtype == :instance
-
Assembly::Instance.delete(id_handle(assembly_id),:destroy_nodes => true)
-
rest_ok_response
-
end
-
end
-
-
1
def rest__purge()
-
workspace = ret_workspace_object?()
-
workspace.purge(:destroy_nodes => true)
-
rest_ok_response
-
end
-
-
1
def rest__destroy_and_reset_nodes()
-
assembly = ret_assembly_instance_object()
-
assembly.destroy_and_reset_nodes()
-
rest_ok_response
-
end
-
-
1
def rest__remove_from_system()
-
assembly = ret_assembly_instance_object()
-
Assembly::Instance.delete(assembly.id_handle())
-
rest_ok_response
-
end
-
-
1
def rest__set_target()
-
workspace = ret_workspace_object?()
-
target = create_obj(:target_id, Target::Instance)
-
workspace.set_target(target)
-
rest_ok_response
-
end
-
-
1
def rest__delete_node()
-
assembly = ret_assembly_instance_object()
-
node_idh = ret_node_or_group_member_id_handle(:node_id,assembly)
-
assembly.delete_node(node_idh,:destroy_nodes => true)
-
rest_ok_response
-
end
-
-
1
def rest__delete_node_group()
-
assembly = ret_assembly_instance_object()
-
node_idh = ret_node_or_group_member_id_handle(:node_id,assembly)
-
assembly.delete_node_group(node_idh)
-
rest_ok_response
-
end
-
-
1
def rest__get_node_groups()
-
assembly = ret_assembly_instance_object()
-
rest_ok_response assembly.get_node_groups()
-
end
-
-
1
def rest__get_nodes_without_node_groups()
-
assembly = ret_assembly_instance_object()
-
rest_ok_response assembly.get_nodes__expand_node_groups(:remove_node_groups=>true)
-
end
-
-
1
def rest__delete_component()
-
node_id = nil
-
assembly = ret_assembly_instance_object()
-
-
# Retrieving node_id to validate if component belongs to node when delete-component invoked from component-level context
-
node_id = ret_node_id(:node_id,assembly) if ret_request_params(:node_id)
-
-
component_id = ret_non_null_request_params(:component_id)
-
assembly_id = assembly.id()
-
cmp_full_name = ret_request_params(:cmp_full_name)
-
-
# cmp_name, namespace = ret_non_null_request_params(:component_id, :namespace)
-
cmp_name, namespace = ret_request_params(:component_id, :namespace)
-
-
assembly_idh = assembly.id_handle()
-
cmp_mh = assembly_idh.createMH(:component)
-
-
if cmp_full_name && node_id
-
# cmp_idh = ret_component_id_handle(:cmp_full_name,:assembly_id => assembly_id)
-
component = Component.ret_component_with_namespace_for_node(cmp_mh, cmp_name, node_id, namespace, assembly)
-
raise ErrorUsage.new("Component with identifier (#{namespace.nil? ? '' : namespace + ':'}#{cmp_name}) does not exist!") unless component
-
-
cmp_idh = component.id_handle()
-
else
-
cmp_idh = id_handle(component_id,:component)
-
end
-
-
assembly.delete_component(cmp_idh, node_id)
-
rest_ok_response
-
end
-
-
#### end: create and delete actions ###
-
#### list and info actions ###
-
1
def rest__info()
-
assembly = ret_assembly_object()
-
node_id, component_id, attribute_id, return_json, only_node_group_info = ret_request_params(:node_id, :component_id, :attribute_id, :json_return, :only_node_group_info)
-
-
opts = {:remove_assembly_wide_node => true}
-
opts.merge!(:only_node_group_info => true) if only_node_group_info
-
if return_json.eql?('true')
-
rest_ok_response assembly.info(node_id, component_id, attribute_id, opts)
-
else
-
rest_ok_response assembly.info(node_id, component_id, attribute_id, opts), :encode_into => :yaml
-
end
-
end
-
-
1
def rest__list_component_module_diffs()
-
module_id, workspace_branch, module_branch_id, repo_id = ret_request_params(:module_id, :workspace_branch, :module_branch_id, :repo_id)
-
repo = id_handle(repo_id,:repo).create_object()
-
project = get_default_project()
-
module_branch = id_handle(module_branch_id, :module_branch).create_object()
-
-
project_idh = project.id_handle()
-
opts = Opts.new(:project_idh => project_idh)
-
-
rest_ok_response AssemblyModule::Component.list_remote_diffs(model_handle(), module_id, repo, module_branch, workspace_branch, opts)
-
end
-
-
1
def rest__get_component_modules()
-
assembly = ret_assembly_object()
-
rest_ok_response assembly.get_component_modules({:get_version_info=>true})
-
end
-
-
1
def rest__rename()
-
assembly = ret_assembly_object()
-
assembly_name = ret_non_null_request_params(:assembly_name)
-
new_assembly_name = ret_non_null_request_params(:new_assembly_name)
-
-
rest_ok_response assembly.rename(model_handle(), assembly_name, new_assembly_name)
-
end
-
-
# TODO: may be cleaner if we break into list_nodes, list_components with some shared helper functions
-
1
def rest__info_about()
-
node_id, component_id, detail_level, detail_to_include = ret_request_params(:node_id, :component_id, :detail_level, :detail_to_include)
-
node_id = nil if node_id.kind_of?(String) and node_id.empty?
-
component_id = nil if component_id.kind_of?(String) and component_id.empty?
-
assembly,subtype = ret_assembly_params_object_and_subtype()
-
response_opts = Hash.new
-
if format = ret_request_params(:format)
-
format = format.to_sym
-
unless SupportedFormats.include?(format)
-
raise ErrorUsage.new("Illegal format (#{format}) specified; it must be one of: #{SupportedFormats.join(',')}")
-
end
-
end
-
-
about = ret_non_null_request_params(:about).to_sym
-
unless AboutEnum[subtype].include?(about)
-
raise ErrorUsage::BadParamValue.new(:about,AboutEnum[subtype])
-
end
-
-
opts = Opts.new(:detail_level => detail_level)
-
additional_filter_proc = nil
-
if about == :attributes
-
if format == :yaml
-
opts.merge!(:settings_form => true,:mark_unset_required => true)
-
else
-
opts.merge!(:truncate_attribute_values => true,:mark_unset_required => true)
-
end
-
-
additional_filter_opts = {
-
:tags => ret_request_params(:tags),
-
:editable => 'editable' == ret_request_params(:attribute_type)
-
}
-
additional_filter_proc = Proc.new do |e|
-
attr = e[:attribute]
-
(!attr.kind_of?(Attribute)) or !attr.filter_when_listing?(additional_filter_opts)
-
end
-
elsif about == :components
-
# if not at node level filter out components on node group members (target_refs)
-
unless node_id
-
additional_filter_proc = Proc.new do |e|
-
node = e[:node]
-
(!node.kind_of?(Node)) or !Node::TargetRef.is_target_ref?(node)
-
end
-
end
-
end
-
-
opts[:filter_proc] = Proc.new do |e|
-
if element_matches?(e,[:node,:id],node_id) and
-
element_matches?(e,[:attribute,:component_component_id],component_id)
-
if additional_filter_proc.nil? or additional_filter_proc.call(e)
-
e
-
end
-
end
-
end
-
opts.add_return_datatype!()
-
if detail_to_include
-
opts.merge!(:detail_to_include => detail_to_include.map{|r|r.to_sym})
-
opts.add_value_to_return!(:datatype)
-
end
-
-
if node_id
-
opts.merge!(:node_cmp_name => true)
-
end
-
-
data = assembly.info_about(about, opts)
-
datatype = opts.get_datatype
-
response_opts = Hash.new
-
if format == :yaml
-
response_opts.merge!(:encode_into => :yaml)
-
else
-
response_opts.merge!(:datatype => datatype)
-
end
-
rest_ok_response data, response_opts
-
end
-
1
SupportedFormats = [:yaml]
-
-
1
def rest__info_about_task()
-
assembly = ret_assembly_instance_object()
-
task_action = ret_request_params(:task_action)
-
response = assembly.get_task_template_serialized_content(task_action)
-
response_opts = Hash.new
-
if response
-
response_opts.merge!(:encode_into => :yaml)
-
else
-
response = {:message => "Task not yet generated for assembly (#{assembly.get_field?(:display_name)})"}
-
end
-
rest_ok_response response, response_opts
-
end
-
-
1
def rest__cancel_task()
-
assembly = ret_assembly_instance_object()
-
unless top_task_id = ret_request_params(:task_id)
-
unless top_task = get_most_recent_executing_task([:eq,:assembly_id,assembly.id()])
-
raise ErrorUsage.new("No running tasks found")
-
end
-
top_task_id = top_task.id()
-
end
-
cancel_task(top_task_id)
-
rest_ok_response :task_id => top_task_id
-
end
-
-
1
def rest__list_modules()
-
ids = ret_request_params(:assemblies)
-
assembly_templates = get_assemblies_from_ids(ids)
-
components = Assembly::Template.list_modules(assembly_templates)
-
-
rest_ok_response components
-
end
-
-
1
def rest__prepare_for_edit_module()
-
assembly = ret_assembly_instance_object()
-
module_type = ret_non_null_request_params(:module_type)
-
-
response =
-
case module_type.to_sym
-
when :component_module
-
module_name = ret_non_null_request_params(:module_name)
-
opts_validate = {:ret_locked_branch_sha => true}
-
namespace = AssemblyModule::Component.validate_component_module_ret_namespace(assembly,module_name,opts_validate)
-
sha = opts_validate[:ret_locked_branch_sha]
-
component_module = create_obj(:module_name,ComponentModule,namespace)
-
opts = (sha ? {:sha => sha} : {})
-
AssemblyModule::Component.prepare_for_edit(assembly,component_module,opts)
-
when :service_module
-
modification_type = ret_non_null_request_params(:modification_type).to_sym
-
opts = ret_params_hash(:task_action,:create,:base_task_action)
-
-
# TODO: support
-
if opts[:create]
-
raise ErrorUsage.new("create-workflow is not yet supported")
-
end
-
-
AssemblyModule::Service.prepare_for_edit(assembly,modification_type,opts)
-
else
-
raise ErrorUsage.new("Illegal module_type #{module_type}")
-
end
-
-
rest_ok_response response
-
end
-
-
1
def rest__promote_module_updates()
-
assembly = ret_assembly_instance_object()
-
module_type, module_name = ret_non_null_request_params(:module_type,:module_name)
-
-
unless module_type.to_sym == :component_module
-
raise Error.new("promote_module_changes only treats component_module type")
-
end
-
-
namespace = AssemblyModule::Component.validate_component_module_ret_namespace(assembly,module_name)
-
component_module = create_obj(:module_name,ComponentModule,namespace)
-
opts = ret_boolean_params_hash(:force)
-
rest_ok_response AssemblyModule::Component.promote_module_updates(assembly,component_module,opts)
-
end
-
-
1
def rest__get_component_module_info()
-
assembly = ret_assembly_instance_object()
-
module_type, module_name = ret_non_null_request_params(:module_type,:module_name)
-
-
unless module_type.to_sym == :component_module
-
raise Error.new("promote_module_changes only treats component_module type")
-
end
-
-
namespace = AssemblyModule::Component.validate_component_module_ret_namespace(assembly,module_name)
-
component_module = create_obj(:module_name,ComponentModule,namespace)
-
opts = ret_boolean_params_hash(:force)
-
-
branch_info = AssemblyModule::Component.component_module_workspace_info(assembly, component_module, opts)
-
branch_info.merge!(:assembly_name => assembly[:display_name])
-
-
rest_ok_response branch_info
-
end
-
-
1
def rest__create_component_dependency()
-
assembly = ret_assembly_instance_object()
-
cmp_template = ret_component_template(:component_template_id)
-
antecedent_cmp_template = ret_component_template(:antecedent_component_template_id)
-
type = :simple
-
AssemblyModule::Component.create_component_dependency?(type,assembly,cmp_template,antecedent_cmp_template)
-
rest_ok_response
-
end
-
-
1
AboutEnum = {
-
:instance => [:nodes,:components,:tasks,:attributes,:modules],
-
:template => [:nodes,:components,:targets]
-
}
-
1
FilterProc = {
-
:attributes => lambda{|attr|not attr[:hidden]}
-
}
-
-
1
def rest__add_ad_hoc_attribute_links()
-
assembly = ret_assembly_instance_object()
-
target_attr_term,source_attr_term = ret_non_null_request_params(:target_attribute_term,:source_attribute_term)
-
update_meta = ret_request_params(:update_meta)
-
opts = Hash.new
-
# update_meta == true is the default
-
unless !update_meta.nil? and !update_meta
-
opts.merge!(:update_meta => true)
-
end
-
AttributeLink::AdHoc.create_adhoc_links(assembly,target_attr_term,source_attr_term,opts)
-
rest_ok_response
-
end
-
-
1
def rest__delete_service_link()
-
port_link = ret_port_link()
-
Assembly::Instance::ServiceLink.delete(port_link.id_handle())
-
rest_ok_response
-
end
-
-
1
def rest__add_service_link()
-
assembly = ret_assembly_instance_object()
-
assembly_id = assembly.id()
-
input_cmp_idh = ret_component_id_handle(:input_component_id,:assembly_id => assembly_id)
-
output_cmp_idh = ret_component_id_handle(:output_component_id,:assembly_id => assembly_id)
-
opts = ret_params_hash(:dependency_name)
-
service_link_idh = assembly.add_service_link?(input_cmp_idh,output_cmp_idh,opts)
-
rest_ok_response :service_link => service_link_idh.get_id()
-
end
-
-
1
def rest__list_attribute_mappings()
-
port_link = ret_port_link()
-
# TODO: stub
-
ams = port_link.list_attribute_mappings()
-
pp ams
-
rest_ok_response
-
end
-
-
1
def rest__list_service_links()
-
assembly = ret_assembly_instance_object()
-
component_id = ret_component_id?(:component_id, :assembly_id => assembly.id())
-
context = (ret_request_params(:context) || :assembly).to_sym
-
opts = { :context => context }
-
opts.merge!(:filter => { :input_component_id => component_id }) if component_id
-
opts.merge!(:hide_assembly_wide_node => true)
-
ret = assembly.list_service_links(opts)
-
rest_ok_response ret
-
end
-
# TODO: deprecate below for above
-
1
def rest__list_connections()
-
assembly = ret_assembly_instance_object()
-
find_missing,find_possible = ret_request_params(:find_missing,:find_possible)
-
ret =
-
if find_possible
-
assembly.list_connections__possible()
-
elsif find_missing
-
raise Error.new("Deprecated")
-
else
-
raise Error.new("Deprecated")
-
end
-
rest_ok_response ret
-
end
-
-
1
def rest__list_possible_add_ons()
-
assembly = ret_assembly_instance_object()
-
rest_ok_response assembly.get_service_add_ons()
-
end
-
-
1
def rest__get_attributes()
-
filter = ret_request_params(:filter)
-
filter = filter && filter.to_sym
-
assembly = ret_assembly_instance_object()
-
rest_ok_response assembly.get_attributes_print_form(Opts.new(:filter => filter))
-
end
-
-
1
def rest__workspace_object()
-
rest_ok_response Assembly::Instance.get_workspace_object(model_handle(),{})
-
end
-
-
1
def rest__list()
-
subtype = ret_assembly_subtype()
-
result =
-
if subtype == :instance
-
opts = ret_params_hash(:filter, :detail_level, :include_namespaces)
-
opts.merge!(:remove_assembly_wide_node => true)
-
Assembly::Instance.list(model_handle(), opts)
-
else
-
project = get_default_project()
-
opts = {:version_suffix => true}.merge(ret_params_hash(:filter,:detail_level))
-
Assembly::Template.list(model_handle(),opts.merge(:project_idh => project.id_handle()))
-
end
-
rest_ok_response result
-
end
-
-
1
def rest__list_with_workspace()
-
opts = ret_params_hash(:filter)
-
rest_ok_response Assembly::Instance.list_with_workspace(model_handle(),opts)
-
end
-
-
1
def rest__print_includes()
-
assembly = ret_assembly_instance_object()
-
rest_ok_response assembly.print_includes(), :encode_into => :yaml
-
end
-
-
#### end: list and info actions ###
-
-
1
def rest__apply_attribute_settings()
-
assembly = ret_assembly_instance_object()
-
settings_hash = ret_attribute_settings_hash()
-
ServiceSetting::AttributeSettings.apply_using_settings_hash(assembly,settings_hash)
-
rest_ok_response
-
end
-
-
##
-
# Sets or creates attributes
-
# TODO: update what input can be
-
# the body has an array each element of form
-
# {:pattern => PAT, :value => VAL}
-
# pat can be one of three forms
-
# 1 - an id
-
# 2 - a name of form ASSEM-LEVEL-ATTR or NODE/COMONENT/CMP-ATTR, or
-
# 3 - a pattern (TODO: give syntax) that can pick out multiple vars
-
# this returns same output as info about attributes, pruned for just new ones set
-
# TODO: this is a minsnomer in that it can be used to just create attributes
-
1
def rest__set_attributes
-
assembly = ret_assembly_instance_object()
-
av_pairs = ret_params_av_pairs()
-
opts = ret_params_hash(:format, :context, :create)
-
create_options = ret_boolean_params_hash(:required, :dynamic)
-
-
if semantic_data_type = ret_request_params(:datatype)
-
unless Attribute::SemanticDatatype.isa?(semantic_data_type)
-
raise ErrorUsage.new("The term (#{semantic_data_type}) is not a valid data type")
-
end
-
create_options.merge!(:semantic_data_type => semantic_data_type)
-
end
-
-
unless create_options.empty?
-
unless opts[:create]
-
raise ErrorUsage.new("Options (#{create_options.values.join(',')}) can only be given if :create is true")
-
end
-
opts.merge!(:attribute_properties => create_options)
-
end
-
-
# update_meta == true is the default
-
update_meta = ret_request_params(:update_meta)
-
opts.merge!(:update_meta => true) unless !update_meta.nil? && !update_meta
-
-
opts.merge!(:node_attribute => true) if ret_request_params(:node_attribute)
-
opts.merge!(:component_attribute => true) if ret_request_params(:component_attribute)
-
-
rest_ok_response assembly.set_attributes(av_pairs, opts)
-
end
-
-
#### actions to update and create assembly templates
-
1
def rest__promote_to_template
-
assembly = ret_assembly_instance_object()
-
assembly_template_name, service_module_name, module_namespace = get_template_and_service_names_params(assembly)
-
-
if assembly_template_name.nil? || service_module_name.nil?
-
raise ErrorUsage.new('SERVICE-NAME/ASSEMBLY-NAME cannot be determined and must be explicitly given')
-
end
-
project = get_default_project()
-
opts = ret_symbol_params_hash(:mode)
-
-
if namespace = ret_request_params(:namespace)
-
opts.merge!(:namespace => namespace)
-
elsif ret_request_params(:use_module_namespace)
-
opts.merge!(:namespace => module_namespace)
-
end
-
-
if description = ret_request_params(:description)
-
opts.merge!(:description => description)
-
end
-
-
if local_clone_dir_exists = ret_request_params(:local_clone_dir_exists)
-
opts.merge!(:local_clone_dir_exists => local_clone_dir_exists)
-
end
-
-
service_module = Assembly::Template.create_or_update_from_instance(project, assembly, service_module_name, assembly_template_name, opts)
-
rest_ok_response service_module.ret_clone_update_info()
-
end
-
#### end: actions to update and create assembly templates
-
-
#### methods to modify the assembly instance
-
1
def rest__add_node()
-
assembly = ret_assembly_instance_object()
-
assembly_node_name = ret_non_null_request_params(:assembly_node_name)
-
node_binding_rs = node_binding_ruleset?(:node_template_identifier)
-
node_instance_idh = assembly.add_node(assembly_node_name,node_binding_rs)
-
-
rest_ok_response node_instance_idh
-
end
-
-
1
def rest__add_node_group()
-
assembly = ret_assembly_instance_object()
-
node_group_name = ret_non_null_request_params(:node_group_name)
-
node_binding_rs = node_binding_ruleset?(:node_template_identifier)
-
cardinality = ret_non_null_request_params(:cardinality)
-
node_group_idh = assembly.add_node_group(node_group_name, node_binding_rs, cardinality)
-
-
rest_ok_response node_group_idh
-
end
-
-
1
def rest__add_component()
-
assembly = ret_assembly_instance_object()
-
cmp_name, namespace = ret_request_params(:component_template_id, :namespace)
-
assembly_idh = assembly.id_handle()
-
-
cmp_mh = assembly_idh.createMH(:component)
-
unless aug_component_template = Component::Template.get_augmented_component_template(cmp_mh, cmp_name, namespace, assembly)
-
raise ErrorUsage.new("Component with identifier #{namespace.nil? ? '\'' : ('\'' + namespace + ':')}#{cmp_name}' does not exist!")
-
end
-
-
component_title = ret_component_title?(cmp_name)
-
node_id = ret_request_params(:node_id)
-
opts = ret_boolean_params_hash(:idempotent, :donot_update_workflow)
-
node_idh = node_id.empty? ? nil : ret_node_id_handle(:node_id, assembly)
-
-
new_component_idh = assembly.add_component(node_idh, aug_component_template, component_title, opts)
-
rest_ok_response(:component_id => new_component_idh.get_id())
-
end
-
-
1
def rest__add_assembly_template()
-
assembly = ret_assembly_instance_object()
-
assembly_template = ret_assembly_template_object(:assembly_template_id)
-
assembly.add_assembly_template(assembly_template)
-
rest_ok_response
-
end
-
-
1
def rest__add_service_add_on()
-
assembly = ret_assembly_instance_object()
-
add_on_name = ret_non_null_request_params(:service_add_on_name)
-
new_sub_assembly_idh = assembly.service_add_on(add_on_name)
-
rest_ok_response(:sub_assembly_id => new_sub_assembly_idh.get_id())
-
end
-
-
#### end: methods to modify the assembly instance
-
-
#### method(s) related to staging assembly template
-
1
def rest__stage()
-
target_id = ret_request_param_id_optional(:target_id, Target::Instance)
-
target = target_idh_with_default(target_id).create_object(:model_name => :target_instance)
-
-
# Special case to support Jenikins CLI orders, since we are not using shell we do not have access
-
# to element IDs. This "workaround" helps with that.
-
if service_module_id = ret_request_params(:service_module_id)
-
# this is name of assembly template
-
assembly_id = ret_request_params(:assembly_id)
-
service_module = ServiceModule.find(model_handle(:service_module), service_module_id)
-
assembly_template = service_module.get_assembly_templates().find { |template| template[:display_name].eql?(assembly_id) || template[:id] == assembly_id.to_i }
-
raise ErrorUsage, "We are not able to find assembly '#{assembly_id}' for service module '#{service_module_id}'" unless assembly_template
-
else
-
assembly_template = ret_assembly_template_object()
-
end
-
-
opts = Hash.new
-
if assembly_name = ret_request_params(:name)
-
opts[:assembly_name] = assembly_name
-
end
-
if service_settings = ret_settings_objects(assembly_template)
-
opts[:service_settings] = service_settings
-
end
-
new_assembly_obj = assembly_template.stage(target, opts)
-
-
response = {
-
:new_service_instance => {
-
:name => new_assembly_obj.display_name_print_form,
-
:id => new_assembly_obj.id()
-
}
-
}
-
rest_ok_response(response,:encode_into => :yaml)
-
end
-
-
1
def rest__deploy()
-
# stage assembly template
-
target_id = ret_request_param_id_optional(:target_id, Target::Instance)
-
target = target_idh_with_default(target_id).create_object(:model_name => :target_instance)
-
-
# Special case to support Jenikins CLI orders, since we are not using shell we do not have access
-
# to element IDs. This "workaround" helps with that.
-
if service_module_id = ret_request_params(:service_module_id)
-
# this is name of assembly template
-
assembly_id = ret_request_params(:assembly_id)
-
service_module = ServiceModule.find(model_handle(:service_module), service_module_id)
-
assembly_template = service_module.get_assembly_templates().find { |template| template[:display_name].eql?(assembly_id) || template[:id] == assembly_id.to_i }
-
raise ErrorUsage, "We are not able to find assembly '#{assembly_id}' for service module '#{service_module_id}'" unless assembly_template
-
else
-
assembly_template = ret_assembly_template_object()
-
end
-
-
-
opts = Hash.new
-
if assembly_name = ret_request_params(:name)
-
opts[:assembly_name] = assembly_name
-
end
-
if service_settings = ret_settings_objects(assembly_template)
-
opts[:service_settings] = service_settings
-
end
-
assembly_instance = assembly_template.stage(target, opts)
-
-
# see if any violations
-
violation_objects = assembly_instance.find_violations()
-
unless violation_objects.empty?
-
violation_table = violation_objects.map do |v|
-
{:type => v.type(),:description => v.description()}
-
end
-
error_data = {
-
:violations => violation_table.uniq
-
}
-
error_msg = "Assembly cannot be executed because of violations"
-
# return rest_notok_response(:code => :assembly_violations, :message => error_msg, :data => error_data)
-
end
-
-
# create task
-
task = Task.create_from_assembly_instance(assembly_instance,ret_params_hash(:commit_msg))
-
task.save!()
-
-
# TODO: this is simple but expensive way to get all teh embedded task ids filled out
-
# can replace with targeted method that does just this
-
task = Task.get_hierarchical_structure(task.id_handle())
-
# execute task
-
workflow = Workflow.create(task)
-
workflow.defer_execution()
-
-
response = {
-
:assembly_instance_id => assembly_instance.id(),
-
:assembly_instance_name => assembly_instance.display_name_print_form,
-
:task_id => task.id()
-
}
-
rest_ok_response response
-
end
-
-
1
# Returns the settings defined on the requested assembly template.
def rest__list_settings()
  template = ret_assembly_template_object()
  rest_ok_response template.get_settings()
end
-
-
#### end: method(s) related to staging assembly template
-
-
#### creates tasks to execute/converge assemblies and monitor status
-
1
# Finds violations on the assembly instance and returns a deduplicated
# table of {:type, :description} rows sorted by violation type.
def rest__find_violations()
  assembly = ret_assembly_instance_object()
  rows = assembly.find_violations().map do |violation|
    { :type => violation.type(), :description => violation.description() }
  end
  sorted = rows.sort { |a, b| a[:type].to_s <=> b[:type].to_s }
  rest_ok_response sorted.uniq
end
-
-
1
# Creates (and kicks off) a converge task for an assembly instance.
# If some nodes are stopped and :start_assembly was not passed, responds
# with a confirmation request instead of implicitly booting nodes.
def rest__create_task()
  assembly = ret_assembly_instance_object()
  stopped = assembly.any_stopped_nodes?()

  # ask the client to confirm before implicitly starting stopped nodes
  if stopped and ret_request_params(:start_assembly).nil?
    return rest_ok_response :confirmation_message => true
  end

  if assembly.are_nodes_running_in_task?()
    raise ErrorUsage, "Task is already running on requested nodes. Please wait until task is complete"
  end

  opts = ret_params_hash(:commit_msg, :task_action, :task_params)
  # :ret_nodes is filled in by task creation with the nodes that need booting
  opts.merge!(:start_node_changes => true, :ret_nodes => []) if stopped

  task = Task.create_from_assembly_instance(assembly, opts)
  task.save!()

  # TODO: clean up this part since this is doing more than creating task
  pending = (opts[:ret_nodes] || []).reject { |node| node[:admin_op_status] == "running" }
  unless pending.empty?
    CreateThread.defer_with_session(CurrentSession.new.user_object(), Ramaze::Current::session) do
      # invoking command to start the nodes
      CommandAndControl.start_instances(pending)
    end
  end

  rest_ok_response :task_id => task.id
end
-
-
1
# Clears all tasks associated with the assembly instance.
def rest__clear_tasks()
  ret_assembly_instance_object().clear_tasks()
  rest_ok_response
end
-
-
#TODO: cleanup
-
1
# Starts an assembly's stopped nodes matching node_pattern: creates a task
# that runs when the nodes are ready, then asynchronously boots the
# instances in a deferred thread. Returns the task id.
#
# NOTE(review): removed a dead `task = nil` initialization and leftover
# commented-out SimpleActionQueue scaffolding; behavior is unchanged.
# TODO: cleanup
def rest__start()
  assembly = ret_assembly_instance_object()
  node_pattern = ret_request_params(:node_pattern)

  # filters only stopped nodes for this assembly
  nodes, is_valid, error_msg = assembly.nodes_valid_for_stop_or_start(node_pattern, :stopped)
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => [error_msg])
  end

  opts = (nodes.size == 1) ? { :node => nodes.first } : { :nodes => nodes }

  task = Task.task_when_nodes_ready_from_assembly(assembly, :assembly, opts)
  task.save!()

  user_object = CurrentSession.new.user_object()
  CreateThread.defer_with_session(user_object, Ramaze::Current::session) do
    # invoking command to start the nodes
    CommandAndControl.start_instances(nodes)
  end

  rest_ok_response :task_id => task.id
end
-
-
1
# Stops the assembly's running nodes that match node_pattern.
def rest__stop()
  assembly = ret_assembly_instance_object()
  node_pattern = ret_request_params(:node_pattern)

  nodes, is_valid, error_msg = assembly.nodes_valid_for_stop_or_start(node_pattern, :running)
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => [error_msg])
  end

  Node.stop_instances(nodes)
  rest_ok_response :status => :ok
end
-
-
1
# Reports task status for the assembly; :format defaults to :hash.
def rest__task_status()
  assembly = ret_assembly_instance_object()
  opts = {
    :format => (ret_request_params(:format) || :hash).to_sym,
    :detail_level => ret_boolean_params_hash(:summarize_node_groups)
  }
  rest_ok_response Task::Status::Assembly.get_status(assembly.id_handle, opts)
end
-
-
1
# Returns the detail for one task action, identified by :message_id.
def rest__task_action_detail()
  assembly = ret_assembly_instance_object()
  action_label = ret_request_params(:message_id)
  rest_ok_response Task::ActionResults.get_action_detail(assembly, action_label)
end
-
-
### command and control actions
-
1
# Initiates a log-tail action on the assembly's nodes matching
# :node_identifier; returns a queue id for polling results.
def rest__initiate_get_log()
  assembly = ret_assembly_instance_object()
  params = ret_params_hash(:log_path, :start_line)
  node_pattern = ret_params_hash(:node_identifier)

  candidates = ret_matching_nodes(assembly, node_pattern)
  nodes, is_valid, error_msg = assembly.nodes_are_up?(candidates, :running, {:what => "Tail"})
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => error_msg)
  end

  queue = initiate_action(GetLog, assembly, params, node_pattern)
  rest_ok_response :action_results_id => queue.id
end
-
-
1
# Initiates a grep action against logs on the assembly's matching nodes.
def rest__initiate_grep()
  assembly = ret_assembly_instance_object()
  params = ret_params_hash(:log_path, :grep_pattern, :stop_on_first_match)
  # TODO: should use in rest call :node_identifier
  np = ret_request_params(:node_pattern)
  node_pattern = (np ? { :node_identifier => np } : {})

  candidates = ret_matching_nodes(assembly, node_pattern)
  nodes, is_valid, error_msg = assembly.nodes_are_up?(candidates, :running, {:what => "Grep"})
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => error_msg)
  end

  queue = initiate_action(Grep, assembly, params, node_pattern)
  rest_ok_response :action_results_id => queue.id
end
-
-
1
# Initiates a netstats collection action on the matching running nodes.
def rest__initiate_get_netstats()
  assembly = ret_assembly_instance_object()
  params = {}
  node_pattern = ret_params_hash(:node_id)

  candidates = ret_matching_nodes(assembly, node_pattern)
  nodes, is_valid, error_msg = assembly.nodes_are_up?(candidates, :running, :what => 'Get netstats')
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => error_msg)
  end

  queue = initiate_action(GetNetstats, assembly, params, node_pattern)
  rest_ok_response :action_results_id => queue.id
end
-
-
1
# Dev/scaffolding endpoint: sends a hardcoded action-agent request (a `date`
# syscall plus two file-positioning examples) to a single node.
# NOTE(review): the env vars and payload below are hardcoded test fixtures —
# presumably for exercising the action agent; confirm before relying on this.
def initiate_action_agent()
  node = create_obj(:node_id, ::DTK::Node)
  params = ret_params_hash(:bash_command)

  execution_list = [
    {
      :type => 'syscall',
      :command => "date",
      :if => 'echo works!'
    }
  ]
  positioning = [
    {
      :type => 'file',
      :source => {
        :type => 'git',
        :url => "https://github.com/erikhuda/thor.git",
        :ref => 'master'
      },
      :target => {
        :path => "/root/thor"
      },
    },
    {
      :type => 'file',
      :source => {
        :type => 'in_payload',
        :content => "Hello WORLD!"
      },
      :target => {
        :path => "/root/test-folder/site-stage-1-invocation-1.pp"
      }
    }
  ]
  params.merge!(
    :action_agent_request => {
      :env_vars => { :HARIS => 'WORKS', :NESTO => 21 },
      :execution_list => execution_list,
      :positioning2 => positioning
    }
  )

  queue = initiate_action_with_nodes(ActionAgent, [node], params)
  rest_ok_response :action_results_id => queue.id
end
-
-
1
# Initiates a process-list (ps) collection action on the matching nodes.
def rest__initiate_get_ps()
  assembly = ret_assembly_instance_object()
  params = {}
  node_pattern = ret_params_hash(:node_id)

  candidates = ret_matching_nodes(assembly, node_pattern)
  nodes, is_valid, error_msg = assembly.nodes_are_up?(candidates, :running, {:what => "Get ps"})
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => error_msg)
  end

  queue = initiate_action(GetPs, assembly, params, node_pattern)
  rest_ok_response :action_results_id => queue.id
end
-
-
1
# Grants or revokes public-key SSH access for a system user on the
# assembly's nodes; :agent_action selects :grant_access or :revoke_access.
def rest__initiate_ssh_pub_access()
  assembly = ret_assembly_instance_object()
  params = ret_params_hash(:rsa_pub_name, :rsa_pub_key, :system_user)
  agent_action = ret_non_null_request_params(:agent_action).to_sym
  target_nodes = ret_matching_nodes(assembly)

  # check existance of key and system user in database
  system_user = params[:system_user]
  key_name = params[:rsa_pub_name]
  nodes = Component::Instance::Interpreted.find_candidates(assembly, system_user, key_name, agent_action, target_nodes)

  queue = initiate_action_with_nodes(SSHAccess, nodes, params.merge(:agent_action => agent_action)) do
    # need to put sanity checking in block under initiate_action_with_nodes
    if target_nodes_option = ret_request_params(:target_nodes)
      unless target_nodes_option.empty?
        raise ErrorUsage.new("Not implemented when target nodes option given")
      end
    end

    if nodes.empty?
      if agent_action == :revoke_access
        raise ErrorUsage.new("Access #{target_nodes.empty? ? '' : 'on given nodes'} is not granted to system user '#{system_user}' with name '#{key_name}'")
      elsif agent_action == :grant_access
        raise ErrorUsage.new("Nodes already have access to system user '#{system_user}' with name '#{key_name}'")
      end
    end
  end
  rest_ok_response :action_results_id => queue.id
end
-
-
1
# Lists current SSH access grants on the assembly instance.
def rest__list_ssh_access()
  assembly = ret_assembly_instance_object()
  rest_ok_response Component::Instance::Interpreted.list_ssh_access(assembly)
end
-
-
1
# Runs serverspec tests for the given component on the assembly's running
# leaf nodes; :node_id optionally restricts execution to one node.
def rest__initiate_execute_tests()
  node_id = ret_request_params(:node_id)
  component = ret_non_null_request_params(:components)
  assembly = ret_assembly_instance_object()
  project = get_default_project()

  # Filter only running nodes for this assembly
  nodes = assembly.get_leaf_nodes(:cols => [:id,:display_name,:type,:external_ref,:hostname_external_ref, :admin_op_status])
  nodes, is_valid, error_msg = assembly.nodes_are_up?(nodes, :running, {:what => "Serverspec tests"})
  unless is_valid
    Log.info(error_msg)
    return rest_ok_response(:errors => error_msg)
  end

  # Narrow to the requested node when one was supplied
  nodes.select! { |node| node[:id] == node_id.to_i } unless node_id.nil?
  if nodes.empty?
    return rest_ok_response(:errors => "Unable to execute tests. Provided node is not valid!")
  end

  params = {:nodes => nodes, :component => component, :agent_action => :execute_tests, :project => project, :assembly_instance => assembly}
  queue = initiate_execute_tests(ExecuteTests, params)
  return rest_ok_response(:errors => queue.error) if queue.error
  rest_ok_response :action_results_id => queue.id
end
-
-
1
# Polls results for a previously initiated action, from either the simple
# queue or the full action-results queue (optionally sorted).
def rest__get_action_results()
  # TODO: to be safe need to garbage collect on ActionResultsQueue in case miss anything
  results_id = ret_non_null_request_params(:action_results_id)
  only_if_complete = ret_request_param_boolean(:return_only_if_complete)
  no_post_processing = ret_request_param_boolean(:disable_post_processing)
  sort_key = ret_request_params(:sort_key)

  if ret_request_param_boolean(:using_simple_queue)
    rest_ok_response SimpleActionQueue.get_results(results_id)
  elsif sort_key
    rest_ok_response ActionResultsQueue.get_results(results_id, only_if_complete, no_post_processing, sort_key.to_sym)
  else
    rest_ok_response ActionResultsQueue.get_results(results_id, only_if_complete, no_post_processing)
  end
end
-
### end: mcollective actions
-
-
# TODO: got here in cleanup of rest calls
-
-
1
# Lists smoketests defined on the assembly.
def rest__list_smoketests()
  assembly = ret_assembly_object()
  rest_ok_response assembly.list_smoketests()
end
-
-
1
# Returns the items of the assembly identified by `id`.
def test_get_items(id)
  assembly = id_handle(id, :component).create_object()
  return { :data => assembly.get_items() }
end
-
-
1
# Searches library-level composite components (not nested in an assembly)
# whose common columns prefix-match the request params, then decorates each
# row with an i18n title and icon path for the UI.
def search
  params = request.params.dup
  cols = model_class(:component).common_columns()

  # one anchored-regex conjunct per recognized column in the request
  filter_conjuncts = params.map do |name, value|
    [:regex, name.to_sym, "^#{value}"] if cols.include?(name.to_sym)
  end.compact

  # restrict results to belong to library and not nested in assembly
  filter_conjuncts += [[:eq,:type,"composite"], [:neq,:library_library_id,nil], [:eq,:assembly_id,nil]]
  sp_hash = {
    :cols => cols,
    :filter => [:and] + filter_conjuncts
  }
  component_list = Model.get_objs(model_handle(:component), sp_hash).each { |r| r.materialize!(cols) }

  i18n = get_i18n_mappings_for_models(:component)
  component_list.each do |component|
    component[:model_name] = :component
    component[:ui] ||= {}
    component[:ui][:images] ||= {}
    name = Assembly.pretty_print_name(component)
    title = name.nil? ? "" : i18n_string(i18n, :component, name)

    # TODO: change after implementing all the new types and making generic icons for them
    model_type = 'service'
    model_sub_type = 'db'
    model_type_str = "#{model_type}-#{model_sub_type}"
    prefix = "#{R8::Config[:base_images_uri]}/v1/componentIcons"
    png = component[:ui][:images][:tnail] || "unknown-#{model_type_str}.png"
    component[:image_path] = "#{prefix}/#{png}"

    component[:i18n] = title
  end

  return { :data => component_list }
end
-
-
1
# Placeholder endpoint; returns stub tree data regardless of `id`.
def get_tree(id)
  { :data => 'some tree data goes here' }
end
-
-
1
# Materializes assembly-template objects for a list of ids.
def get_assemblies_from_ids(ids)
  ids.map do |id|
    id_handle(id.to_i, :component).create_object(:model_name => :assembly_template)
  end
end
-
-
# TODO: unify with clone(id)
-
# clone assembly from library to target
-
1
# Clones (stages) an assembly template from a library into a target and
# seeds UI positions for the nested nodes. Returns the new assembly id.
# TODO: unify with clone(id)
def stage()
  target_idh = target_idh_with_default(request.params["target_id"])
  assembly_id = ret_request_param_id(:assembly_id, ::DTK::Assembly::Template)

  # TODO: if name given and not unique either reject or generate a -n suffix
  assembly_name = ret_request_params(:name)
  id_handle = id_handle(assembly_id)

  # TODO: need to copy in avatar when hash["ui"] is non null
  override_attrs = {}
  override_attrs[:display_name] = assembly_name if assembly_name

  target_object = target_idh.create_object()
  clone_opts = { :ret_new_obj_with_cols => [:id,:type] }
  new_assembly_obj = target_object.clone_into(id_handle.create_object(), override_attrs, clone_opts)
  id = new_assembly_obj && new_assembly_obj.id()

  # compute ui positions
  # TODO: this does not leverage assembly node relative positions
  nested_objs = new_assembly_obj.get_node_assembly_nested_objects()
  nested_objs[:nodes].each do |node|
    target_object.update_ui_for_new_item(node[:id])
  end
  rest_ok_response(:assembly_id => id)
end
-
-
# clone assembly from library to target
-
1
# Clones an assembly from a library into a target datacenter, strips
# internal ports, applies UI positioning relative to the top-most node, and
# returns {:data => nested_objects} for the front end.
#
# NOTE(review): the original carried ~40 lines of redirect-building logic
# after an unconditional `return {:data=>nested_objs}` inside the
# handle_errors block (a block-level `return` exits the method), so that
# code was unreachable and has been removed; behavior is unchanged.
def clone(id)
  handle_errors do
    id_handle = id_handle(id)
    hash = request.params
    target_id_handle = nil
    if hash["target_id"] and hash["target_model_name"]
      input_target_id_handle = id_handle(hash["target_id"].to_i, hash["target_model_name"].to_sym)
      target_id_handle = Model.find_real_target_id_handle(id_handle, input_target_id_handle)
    else
      Log.info("not implemented yet")
      return redirect "/xyz/#{model_name()}/display/#{id.to_s}"
    end

    # TODO: need to copy in avatar when hash["ui"] is non null
    override_attrs = hash["ui"] ? { :ui => hash["ui"] } : {}
    target_object = target_id_handle.create_object()
    clone_opts = { :ret_new_obj_with_cols => [:id,:type] }
    new_assembly_obj = target_object.clone_into(id_handle.create_object(), override_attrs, clone_opts)
    id = new_assembly_obj && new_assembly_obj.id()
    nested_objs = new_assembly_obj.get_node_assembly_nested_objects()

    # just want external ports
    (nested_objs[:nodes] || []).each { |n| (n[:ports] || []).reject! { |p| p[:type] == "component_internal" } }

    # TODO: ganglia hack: remove after putting this info in the r8 meta files
    (nested_objs[:nodes] || []).each do |n|
      (n[:ports] || []).each do |port|
        if port[:display_name] =~ /ganglia__server/
          port[:location] = "east"
        elsif port[:display_name] =~ /ganglia__monitor/
          port[:location] = "west"
        end
      end
    end

    # TODO: get node positions going for assemblies
    parent_id = request.params["parent_id"]
    assembly_left_pos = request.params["assembly_left_pos"]

    dc_hash = get_object_by_id(parent_id, :datacenter)
    raise Error.new("Not implemented when parent_id is not a datacenter") if dc_hash.nil?

    # find the top-most node so every cloned node can be shifted relative to it
    top_node = {}
    top_most = 2000
    nested_objs[:nodes].each do |node|
      ui = node.get_ui_info(dc_hash)
      if ui and (ui[:top].to_i < top_most.to_i)
        left_diff = assembly_left_pos.to_i - ui[:left].to_i
        top_node = { :id => node[:id], :ui => ui, :left_diff => left_diff }
        top_most = ui[:top]
      end
    end

    # shift each node horizontally by the top node's offset and persist
    nested_objs[:nodes].each_with_index do |node, i|
      ui = node.get_ui_info(dc_hash)
      Log.error("no coordinates for node with id #{node[:id].to_s} in #{parent_id.to_s}") unless ui
      if ui
        if node[:id] == top_node[:id]
          ui[:left] = assembly_left_pos.to_i
        else
          ui[:left] = ui[:left].to_i + top_node[:left_diff].to_i
        end
      end
      node.update_ui_info!(ui, dc_hash)
      nested_objs[:nodes][i][:assembly_ui] = ui
    end

    # default link rendering style for any port link without explicit UI
    nested_objs[:port_links].each_with_index do |link, i|
      nested_objs[:port_links][i][:ui] ||= {
        :type => R8::Config[:links][:default_type],
        :style => R8::Config[:links][:default_style]
      }
    end

    return { :data => nested_objs }
  end
end
-
-
end
-
end
-
-
1
module XYZ
-
1
class AttributeController < AuthController
-
1
# Lists the available attribute datatypes.
def get_datatypes()
  { :data => Attribute::Datatype.ret_datatypes() }
end
-
-
1
# Returns the attribute definition for the given attribute id.
def get(attribute_id)
  { :data => create_object_from_id(attribute_id).get_attribute_def() }
end
-
-
# Haris & Amar: Sets attribute value by attribute ID - Currently used for setting module component attribute default value
-
1
# Haris & Amar: Sets attribute value by attribute ID - Currently used for
# setting module component attribute default value.
def rest__set()
  attr_type = ret_non_null_request_params(:attribute_type)
  attribute_id, attribute_value, module_id = ret_non_null_request_params(:attribute_id, :attribute_value, "#{attr_type}_id".to_sym)
  attribute = Attribute.get_attribute_from_identifier(attribute_id, model_handle(), module_id)
  attribute.set_attribute_value(attribute_value)

  rest_ok_response(:attribute_id => attribute_id)
end
-
-
# TODO: cleanup so dont have as much duplication with what is on init; wrote here becse not all cols for attribute save/update are actual columns
-
# update or create depending on whether id is in post content
-
1
# Creates or updates an attribute from an explicit hash or request params.
# With an "id" the record is updated; otherwise a new one is created under
# parent_id/parent_model_name (or the top-level factory). Honors the
# "return_model" (return saved object) and "redirect" flags; opts[:return_id]
# returns the id instead of redirecting.
# TODO: cleanup so dont have as much duplication with what is on init
def save(explicit_hash=nil, opts={})
  hash = explicit_hash || request.params.dup

  ### special fields
  id = hash.delete("id")
  id = nil if id.kind_of?(String) and id.empty?
  parent_id = hash.delete("parent_id")
  parent_model_name = hash.delete("parent_model_name")
  model_name = hash.delete("model")
  name = hash.delete("name") || hash["display_name"]
  redirect = (not (hash.delete("redirect").to_s == "false"))

  # TODO: revisit during cleanup, return_model used for creating links
  return_model = (hash.delete("return_model") == "true")

  if id
    # update existing record
    update_from_hash(id.to_i, hash)
  else
    # create a new record
    # TODO: cleanup confusion over hash and string keys
    hash.merge!(:display_name => name) unless (hash.has_key?(:display_name) or hash.has_key?("display_name"))
    if parent_id
      parent_id_handle = id_handle(parent_id, parent_model_name)
      create_hash = { model_name.to_sym => { name => hash } }
    else
      parent_id_handle = top_level_factory_id_handle()
      create_hash = { name.to_sym => hash }
    end
    new_id = create_from_hash(parent_id_handle, create_hash)
    id = new_id if new_id
  end

  return { :data => get_object_by_id(id) } if return_model
  return id if opts[:return_id]
  redirect "/xyz/#{model_name()}/display/#{id.to_s}" if redirect
end
-
-
1
# Renders the flattened attribute list for a node (or all nodes when nil).
#
# NOTE(review): the original built the identical dataset a second time whose
# only consumer was a commented-out debug dump; that dead duplicate block
# (presumed side-effect free since `.all` was never called on it — confirm)
# has been removed.
def list_under_node(node_id=nil)
  filter = nil
  cols = [:id,:display_name,:value_actual,:value_derived,:data_type,:semantic_type]
  field_set = Model::FieldSet.new(model_name, cols)
  ds = SearchObject.create_from_field_set(field_set, ret_session_context_id(), filter).create_dataset()
  ds = ds.where(:param_node_id => node_id.to_i) if node_id

  raw_attribute_list = ds.all
  attribute_list = AttributeComplexType.flatten_attribute_list(raw_attribute_list)
  # TODO: also filter out when component is not feature

  action_name = "list_qualified_attribute_name_under_node"
  tpl = R8Tpl::TemplateR8.new("#{model_name()}/#{action_name}", user_context())
  tpl.assign("attribute_list", attribute_list)
  return { :content => tpl.render() }
end
-
-
1
# Lists external ports under a node, JSON-encoding structured values so the
# template receives plain text.
def ports_under_node(node_id=nil)
  filter = [:and, [:eq,:is_port,true], [:eq,:port_is_external,true]]
  cols = [:id,:display_name,:value_derived,:value_asserted]
  field_set = Model::FieldSet.new(model_name, cols)
  ds = SearchObject.create_from_field_set(field_set, ret_session_context_id(), filter).create_dataset()
  ds = ds.where(:param_node_id => node_id.to_i) if node_id

  port_list = ds.all
  port_list.each do |port|
    value = port[:attribute_value]
    port[:value] = (value.kind_of?(Hash) or value.kind_of?(Array)) ? JSON.generate(value) : value
  end

  tpl = R8Tpl::TemplateR8.new("#{model_name()}/list_ports_under_node", user_context())
  tpl.assign("port_list", port_list)
  return { :content => tpl.render() }
end
-
-
1
# Renders the node-level attribute edit view, sorted by the node-qualified
# attribute name.
def edit_under_node(node_id=nil)
  filter = nil
  cols = [:id,:display_name,:value_actual,:value_derived,:data_type,:semantic_type]
  field_set = Model::FieldSet.new(model_name, cols)
  ds = SearchObject.create_from_field_set(field_set, ret_session_context_id(), filter).create_dataset()
  ds = ds.where(:param_node_id => node_id.to_i) if node_id

  attribute_list = AttributeComplexType.flatten_attribute_list(ds.all)
  # expose name and attr_id from the node-qualified columns
  attribute_list.each do |attr|
    attr[:attr_id] = attr[:qualified_attribute_id_under_node]
    attr[:name] = attr[:qualified_attribute_name_under_node]
  end
  ordered_attr_list = attribute_list.sort { |a, b| a[:name] <=> b[:name] }

  tpl = R8Tpl::TemplateR8.new("#{model_name()}/test_node_level_edit", user_context())
  tpl.assign("id", node_id.to_s) if node_id
  tpl.assign("attribute_list", ordered_attr_list)
  return { :content => tpl.render() }
end
-
-
-
1
# Lists flattened attributes for a datacenter; defaults to datacenter "dc1"
# when no id is supplied.
def list_under_datacenter(datacenter_id=nil)
  datacenter_id = IDHandle[:c => ret_session_context_id(), :model_name => :datacenter, :uri => "/datacenter/dc1"].get_id() unless datacenter_id
  filter = nil
  cols = [:id,:display_name,:value_actual,:value_derived,:data_type,:semantic_type]
  field_set = Model::FieldSet.new(model_name, cols)
  ds = SearchObject.create_from_field_set(field_set, ret_session_context_id(), filter).create_dataset()
  # a node may hang off the datacenter directly or via a node group
  ds = ds.where(SQL::ColRef.coalesce(:param_node_group_datacenter_id,:param_node_datacenter_id) => datacenter_id)

  attribute_list = AttributeComplexType.flatten_attribute_list(ds.all)

  tpl = R8Tpl::TemplateR8.new("#{model_name()}/list_qualified_attribute_name", user_context())
  tpl.assign("attribute_list", attribute_list)
  return { :content => tpl.render() }
end
-
-
# TODO deprecate
-
1
# Renders the component-display list from the search object carried in the
# request.
# TODO deprecate
def list_for_component_display()
  search_object = ret_search_object_in_request()
  raise Error.new("no search object in request") unless search_object

  model_list = Model.get_objects_from_search_object(search_object)

  # TODO: should we be using default action name
  action_name = :list
  tpl = R8Tpl::TemplateR8.new("#{model_name()}/#{action_name}", user_context())
  _model_var = { :i18n => get_model_i18n(model_name().to_s, user_context()) }

  set_template_defaults_for_list!(tpl)
  tpl.assign("_#{model_name().to_s}", _model_var)
  tpl.assign("#{model_name()}_list", model_list)

  return { :content => tpl.render() }
end
-
-
1
# Renders the workspace dock panel listing a node's attributes (read-only
# counterpart of wspace_node_edit).
def wspace_node_display(node_id=nil)
  filter = nil
  cols = [:id,:display_name,:value_actual,:value_derived,:data_type,:semantic_type]
  field_set = Model::FieldSet.new(model_name, cols)
  ds = SearchObject.create_from_field_set(field_set, ret_session_context_id(), filter).create_dataset()
  ds = ds.where(:param_node_id => node_id.to_i) if node_id
  attribute_list = AttributeComplexType.flatten_attribute_list(ds.all)

  # derive a short display name from the bracket-qualified attribute name
  attribute_list.each do |attr|
    flat = attr[:qualified_attribute_name_under_node].gsub('][',' ').gsub('[',' ').gsub(']',' ')
    parts = flat.split(' ')
    attr[:display_name] = parts.last
    attr[:value] = attr[:attribute_value]
  end
  ordered_attr_list = attribute_list.sort { |a, b| a[:display_name] <=> b[:display_name] }

  tpl = R8Tpl::TemplateR8.new("attribute/wspace_node_display", user_context())
  tpl.assign(:model_name, model_name)
  tpl.assign(:node_id, node_id)
  tpl.assign(:_app, app_common())
  tpl.assign(:attribute_list, ordered_attr_list)
  return {
    :content => tpl.render(),
    :panel => 'wspace-dock-body'
  }
end
-
-
1
# Renders the workspace dock panel for editing a node's attributes
# (editable counterpart of wspace_node_display).
def wspace_node_edit(node_id=nil)
  filter = nil
  cols = [:id,:display_name,:value_actual,:value_derived,:data_type,:semantic_type]
  field_set = Model::FieldSet.new(model_name, cols)
  ds = SearchObject.create_from_field_set(field_set, ret_session_context_id(), filter).create_dataset()
  ds = ds.where(:param_node_id => node_id.to_i) if node_id
  attribute_list = AttributeComplexType.flatten_attribute_list(ds.all)

  # derive a short display name from the bracket-qualified attribute name
  attribute_list.each do |attr|
    flat = attr[:qualified_attribute_name_under_node].gsub('][',' ').gsub('[',' ').gsub(']',' ')
    parts = flat.split(' ')
    attr[:display_name] = parts.last
    attr[:value] = attr[:attribute_value]
  end
  ordered_attr_list = attribute_list.sort { |a, b| a[:display_name] <=> b[:display_name] }

  tpl = R8Tpl::TemplateR8.new("attribute/wspace_node_edit", user_context())
  tpl.assign(:model_name, model_name)
  tpl.assign(:node_id, node_id)
  tpl.assign(:_app, app_common())
  tpl.assign(:attribute_list, ordered_attr_list)
  return {
    :content => tpl.render(),
    :panel => 'wspace-dock-body'
  }
end
-
end
-
end
-
1
module XYZ
-
1
class Attribute_linkController < AuthController
-
-
# deprecate for Port_linkController#save
-
1
# Deprecated in favor of Port_linkController#save.
# This method unconditionally raises on entry, so it never creates links.
#
# NOTE(review): the ~90 lines of former implementation after the raise were
# unreachable dead code (they also relied on the changed
# PortLink.create_port_and_attr_links__clone_if_needed API, per the original
# message) and have been removed; observable behavior is unchanged.
def save(explicit_hash=nil, opts={})
  raise Error.new("TODO: this is now deprecated: PortLink.create_port_and_attr_links__clone_if_needed has changed")
end
-
-
1
# Lists port links for one node, or for all nodes when node_id is nil.
def list_on_node_ports(node_id=nil)
  aux_list_on_node_ports(node_id ? [node_id] : nil)
end
-
-
1
# Returns links for a JSON-encoded list of {id, model} context items.
def get_under_context_list(explicit_hash=nil)
  hash = explicit_hash || request.params
  context_list = JSON.parse(hash["context_list"])
  item_id_handles = context_list.map { |item| id_handle(item["id"].to_i, item["model"].to_sym) }
  link_list = Target.get_links(item_id_handles)
  return {'data' => link_list}
end
-
-
1
# Returns all links hanging off a single object.
# Expected params: "id" (required) and "model" (only "node" is supported),
# e.g. <base uri>/attribute_link/get_under_context?id=9493llskc393&model=node
def get_under_context(explicit_hash=nil)
  hash = explicit_hash || request.params
  raise Error.new("id not given") unless hash["id"]
  raise Error.new("only node type treated at this time") unless hash["model"] == "node"

  aux_list_on_node_ports([hash["id"].to_i])
end
-
-
# TODO: temp
-
1
# Builds the port-link table for the given node ids (nil => all nodes) and
# renders the list_on_node_ports template. Each row carries the port's
# direction, type, hidden flag and the id of the link's other end.
#
# NOTE(review): removed leftover `pp` debug statements that dumped the link
# list to stdout on every request.
# TODO: temp
def aux_list_on_node_ports(node_ids)
  filter = node_ids ? [:and, [:oneof, :id, node_ids]] : nil
  cols = [:id,:display_name,:deprecate_port_links]
  field_set = Model::FieldSet.new(:node, cols)
  ds = SearchObject.create_from_field_set(field_set, ret_session_context_id(), filter).create_dataset()
  # keep only rows that participate in a link on either end
  ds = ds.where(SQL.not(SQL::ColRef.coalesce(:other_end_output_id,:other_end_input_id) => nil))

  link_list = ds.all.map do |el|
    component_name = el[:component][:display_name].gsub(/::.+$/,"")
    port_name = Aux.put_in_bracket_form([component_name] + Aux.tokenize_bracket_name(el[:attribute][:display_name]))
    in_link = el[:attribute_link] || {}
    out_link = el[:attribute_link2] || {}
    {
      :node_id => el[:id],
      :node_name => el[:display_name],
      :port_id => el[:attribute][:id],
      :port_name => port_name,
      :type => in_link[:type] || out_link[:type],
      :port_dir => el[:attribute_link] ? "input" : "output",
      :hidden => in_link[:hidden].nil? ? out_link[:hidden] : in_link[:hidden],
      :other_end_id => in_link[:other_end_output_id] || out_link[:other_end_input_id]
    }
  end

  action_name = "list_on_node_ports"
  tpl = R8Tpl::TemplateR8.new("#{model_name()}/#{action_name}", user_context())
  tpl.assign("link_list", link_list)
  return { :content => tpl.render() }
end
-
end
-
end
-
1
module DTK
-
1
class ComponentController < AuthController
-
1
helper :assembly_helper
-
-
1
def rest__delete()
-
id = ret_non_null_request_params(:id)
-
Model.delete_instance(id_handle(id))
-
rest_ok_response
-
end
-
-
1
def rest__list()
-
project = get_default_project()
-
ignore = ret_request_params(:ignore)
-
assembly_instance = ret_assembly_instance_object?()
-
opts = Opts.new(:project_idh => project.id_handle())
-
opts.merge?(:assembly_instance => assembly_instance)
-
opts.merge?(:ignore => ignore)
-
rest_ok_response Component::Template.list(model_handle(),opts)
-
end
-
-
# TODO: non rest routes; cleanup or remove
-
1
def delete()
-
id = ret_non_null_request_params(:id)
-
Model.delete_instance(id_handle(id))
-
{:data => {:id=>id,:result=>true}}
-
end
-
-
-
-
1
def get_attributes_for_attr_mappings(component_id)
-
component = create_object_from_id(component_id)
-
to_set = {}
-
attr_list = component.get_attributes_unraveled(to_set,:flatten_nil_value => true)
-
{:data => attr_list}
-
end
-
-
1
def get_possible_link_defs(id)
-
component = create_object_from_id(id)
-
poss_link_defs = ComponentTypeHierarchy.possible_link_defs(component)
-
{:data => poss_link_defs}
-
end
-
-
1
def get_posible_link_def_remote_components(link_def_type)
-
# TODO: searching in user's library
-
library_idh = Model.get_objs(model_handle(:library),{:cols => [:id]}).first.id_handle
-
poss_remote_cmps = ComponentTypeHierarchy.possible_link_def_remote_components(link_def_type,library_idh)
-
{:data =>poss_remote_cmps}
-
end
-
-
1
def link_defs_editor(id)
-
component = create_object_from_id(id)
-
possible_link_defs = ComponentTypeHierarchy.possible_link_defs(component)
-
possible_link_defs = Array.new
-
possible_link_defs[0] = {:type=>:database,:i18n=>'Database'}
-
-
tpl = R8Tpl::TemplateR8.new("component/link_def_editor",user_context())
-
tpl.assign(:possible_link_defs,possible_link_defs)
-
-
return {
-
:content=>tpl.render(),
-
:panel=>request.params["panel_id"]
-
}
-
end
-
-
1
def get_by_type(type)
-
# TODO: searching in user's library
-
library_idh = Model.get_objs(model_handle(:library),{:cols => [:id]}).first.id_handle
-
poss_remote_cmps = ComponentTypeHierarchy.possible_link_def_remote_components(type,library_idh)
-
pp poss_remote_cmps
-
{:data =>poss_remote_cmps}
-
end
-
-
1
def get(id)
-
component = create_object_from_id(id)
-
comp = component.get_obj_with_common_cols()
-
return {:data=>comp}
-
end
-
-
1
def search
-
params = request.params.dup
-
cols = model_class(:component).common_columns()
-
-
filter_conjuncts = params.map do |name,value|
-
[:regex,name.to_sym,"^#{value}"] if cols.include?(name.to_sym)
-
end.compact
-
-
filter_conjuncts += [[:neq,:type,"composite"],[:or, [:neq,:project_project_id,nil],[:neq,:library_library_id,nil]],[:eq,:assembly_id,nil]]
-
# MOD_RESTRUCT: when deprecate component templates in library switch above with below
-
# restrict results to belong to project and not nested in assembly
-
# filter_conjuncts += [[:neq,:type,"composite"],[:neq,:project_project_id,nil],[:eq,:assembly_id,nil]]
-
-
-
sp_hash = {
-
:cols => cols,
-
:filter => [:and] + filter_conjuncts
-
}
-
component_list = Model.get_objs(model_handle(:component),sp_hash).each{|r|r.materialize!(cols)}
-
# MOD_RESTRUCT: when deprecate component templates in library remove below
-
ndx_component_list = Hash.new
-
component_list.each do |r|
-
ndx = r[:display_name]
-
if ndx_component_list[ndx].nil? or r[:library_library_id]
-
ndx_component_list[ndx] = r
-
end
-
end
-
component_list = ndx_component_list.values()
-
-
-
i18n = get_i18n_mappings_for_models(model_name)
-
component_list.each_with_index do |model,index|
-
component_list[index][:model_name] = model_name
-
body_value = ''
-
component_list[index][:ui] ||= {}
-
component_list[index][:ui][:images] ||= {}
-
name = component_list[index][:display_name]
-
title = name.nil? ? "" : i18n_string(i18n,:component,name)
-
-
=begin
-
# TDOO: temporary to distingusih between chef and puppet components
-
if model_name == :component
-
if config_agent_type = component_list[index][:config_agent_type]
-
title += " (#{config_agent_type[0].chr})"
-
end
-
end
-
=end
-
-
# TODO: change after implementing all the new types and making generic icons for them
-
model_type = 'service'
-
model_sub_type = 'db'
-
model_type_str = "#{model_type}-#{model_sub_type}"
-
prefix = "#{R8::Config[:base_images_uri]}/v1/componentIcons"
-
png = component_list[index][:ui][:images][:tnail] || "unknown-#{model_type_str}.png"
-
component_list[index][:image_path] = "#{prefix}/#{png}"
-
-
component_list[index][:i18n] = title
-
-
=begin
-
img_value = model_list[index][:ui][:images][:tnail] ?
-
'<div class="img_wrapper"><img title="'+title+'"src="'+R8::Config[:base_images_uri]+'/'+model_name+'Icons/'+model_list[index][:ui][:images][:tnail]+'"/></div>' :
-
''
-
body_value = img_value
-
-
body_value == '' ? body_value = model_list[index][:display_name] : nil
-
model_list[index][:body_value] = body_value
-
=end
-
end
-
{:data=>component_list}
-
end
-
-
1
def clone(id)
-
handle_errors do
-
id_handle = id_handle(id)
-
hash = request.params
-
target_id_handle = nil
-
if hash["target_id"] and hash["target_model_name"]
-
input_target_id_handle = id_handle(hash["target_id"].to_i,hash["target_model_name"].to_sym)
-
target_id_handle = Model.find_real_target_id_handle(id_handle,input_target_id_handle)
-
else
-
Log.info("not implemented yet")
-
return redirect "/xyz/#{model_name()}/display/#{id.to_s}"
-
end
-
-
# TODO: need to copy in avatar when hash["ui"] is non null
-
override_attrs = hash["ui"] ? {:ui=>hash["ui"]} : {}
-
target_object = target_id_handle.create_object()
-
-
# TODO: push in logic that forces us here to pass in real cols and then materialize
-
clone_opts = {
-
:ret_new_obj_with_cols => Component.common_real_columns(),
-
:outermost_ports => Array.new
-
}
-
component_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
-
component_obj.materialize!(Component.common_columns())
-
-
# TODO: ganglia hack: remove after putting this info in teh r8 meta files
-
if component_obj[:display_name] == "ganglia__server"
-
(clone_opts[:outermost_ports]||[]).each{|x|x[:location] = "east"}
-
elsif component_obj[:display_name] == "ganglia__monitor"
-
(clone_opts[:outermost_ports]||[]).each{|x|x[:location] = "west"}
-
end
-
-
data = {
-
:component => component_obj,
-
:ports => clone_opts[:outermost_ports]
-
}
-
{:data => data}
-
end
-
end
-
-
1
def ret_project_component_template(cmp_type,version)
-
sp_hash = {
-
:cols => [:id],
-
:filter => [:and,
-
[:eq,:version, version],
-
[:eq, :component_type, cmp_type],
-
[:neq, :project_project_id, nil]]
-
}
-
ret = Model.get_objects_from_sp_hash(model_handle(),sp_hash).first #TODO: assume just one project
-
raise Error.new("cannot find project template associated with #{cmp_type} (#{version})") unless ret
-
ret
-
end
-
1
private :ret_project_component_template
-
##############################
-
1
def edit_user
-
params = request.params.reject{|k,v| v.nil? or v.empty?}
-
Component.create_user_library_template(model_handle,params)
-
return {:content => {}}
-
end
-
-
1
def details(id)
-
component = get_object_by_id(id)
-
-
tpl = R8Tpl::TemplateR8.new("component/cfg_file_list",user_context())
-
tpl.set_js_tpl_name("component_cfg_file_list")
-
tpl_info = tpl.render()
-
include_js_tpl(tpl_info[:src])
-
-
tpl = R8Tpl::TemplateR8.new("component/details",user_context())
-
-
# img_str = '<img title="' << component[:display_name] << '"' << 'src="' << R8::Config[:base_images_uri] << '/component/Icons/'<< component[:ui][:images][:tnail] << '"/>'
-
-
_model_var = {}
-
_model_var[:i18n] = get_model_i18n(:component,user_context())
-
component[:name] = _model_var[:i18n][component[:display_name].to_sym]
-
-
# TEMP UNTIL FULLY IMPLEMENTING DEPENDENCIES
-
supported_os_list = [
-
{:id=>12345,:name=>'Ubuntu',:version=>'10.4',:ui=>{:images=>{:icon=>'ubuntu-favicon.png'}}},
-
{:id=>12345,:name=>'Debian',:version=>'6',:ui=>{:images=>{:icon=>'debian-favicon.png'}}},
-
{:id=>12345,:name=>'Fedora',:version=>'14',:ui=>{:images=>{:icon=>'fedora-favicon.png'}}},
-
{:id=>12345,:name=>'CentOS',:version=>'5.5',:ui=>{:images=>{:icon=>'centos-favicon.png'}}},
-
{:id=>12345,:name=>'RedHat',:version=>'6',:ui=>{:images=>{:icon=>'redhat-favicon.png'}}}
-
]
-
component[:supported_os_list] = supported_os_list
-
=begin
-
config_file_list = [
-
{:id=>12345,:name=>'php.ini',:owner_id=>'1123',:owner_name=>'Rich',:created_by_id=>'12112',:created_by_name=>'Rich'},
-
{:id=>12345,:name=>'http.conf',:owner_id=>'1123',:owner_name=>'Nate',:created_by_id=>'12112',:created_by_name=>'Nate'},
-
{:id=>12345,:name=>'my.cnf',:owner_id=>'1123',:owner_name=>'Bob',:created_by_id=>'12112',:created_by_name=>'Bob'}
-
]
-
=end
-
cfg_file_list = component.get_config_files();
-
pp '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>'
-
pp cfg_file_list
-
-
tpl.assign("_#{model_name().to_s}",_model_var)
-
tpl.assign("component",component)
-
tpl.assign("config_file_list",cfg_file_list)
-
tpl.assign("component_images_uri",R8::Config[:component_images_uri])
-
-
run_javascript("R8.Displayview.init('#{id}');")
-
-
return {:content => tpl.render()}
-
# return {:content => ""}
-
end
-
-
1
def add_cfg_file(id)
-
tpl = R8Tpl::TemplateR8.new("component/add_cfg_file",user_context())
-
tpl.assign(:component_id,id)
-
-
return {
-
:content=>tpl.render(),
-
:panel=>request.params["panel_id"]
-
}
-
end
-
-
1
def editor(id)
-
component = create_object_from_id(id,:component)
-
field_defs = component.get_field_def()
-
pp [:field_defs,field_defs]
-
# TODO: retool include_js to take string or hash, if hash then assumed js tpl and handled differently
-
tpl = R8Tpl::TemplateR8.new("component/edit_field",user_context())
-
tpl.set_js_tpl_name("component_edit_field")
-
tpl_info = tpl.render()
-
include_js_tpl(tpl_info[:src])
-
-
tpl = R8Tpl::TemplateR8.new("component/display_field",user_context())
-
tpl.set_js_tpl_name("component_display_field")
-
tpl_info = tpl.render()
-
include_js_tpl(tpl_info[:src])
-
-
tpl = R8Tpl::TemplateR8.new("component/editor",user_context())
-
_model_var = {:i18n => get_model_i18n(model_name().to_s,user_context())}
-
tpl.assign(:_component,_model_var)
-
-
tpl.assign(:_app,app_common())
-
tpl.assign(:field_def_list,field_defs)
-
-
include_css('component-editor')
-
include_js('fields.r8')
-
-
field_defs_json = JSON.generate(field_defs)
-
run_javascript("R8.Fields.init(#{field_defs_json});")
-
-
return {
-
:content=>tpl.render(),
-
:panel=>request.params["panel_id"]
-
}
-
end
-
-
1
def save_field(id)
-
field_def_json = request.params["field_def"]
-
field_def_update_x = request.params.merge("field_def" => JSON.parse(field_def_json))
-
# convert "" to nil
-
field_def_update = field_def_update_x.inject({}) do |h,kv|
-
h.merge(kv[0] => (kv[1] && kv[1].empty?) ? nil : kv[1])
-
end
-
field_def_update["required"] = [true,"true"].include?(field_def_update["required"])
-
component = create_object_from_id(field_def_update["field_def"]["component_id"])
-
new_field_def = component.update_field_def(field_def_update)
-
-
run_javascript("R8.Fields.handleSavedField(#{JSON.generate(new_field_def)});")
-
return {}
-
end
-
-
1
def instance_list(id)
-
instance_list = get_objects(:component,{:ancestor_id=>id})
-
-
tpl = R8Tpl::TemplateR8.new("component/list",user_context())
-
_model_var = {:i18n => get_model_i18n(model_name().to_s,user_context())}
-
tpl.assign(:_component,_model_var)
-
tpl.assign(:component_list,instance_list)
-
tpl.assign(:_app,app_common())
-
-
#---------------------------------
-
search_context = 'component-list'
-
search_content = ''
-
tpl.assign(:search_content, search_content)
-
tpl.assign(:search_context, search_context)
-
-
search_object = ret_search_object_in_request()
-
tpl.assign(:list_start_prev,0)
-
tpl.assign(:list_start_next,20)
-
tpl.assign(:current_start,0)
-
-
return {
-
:panel=>request.params["panel_id"],
-
:content=>tpl.render()
-
}
-
end
-
-
1
def constraints(id)
-
# component = create_object_from_id(id,:component)
-
# constraints = component.get_constraints()
-
-
# TODO: retool include_js to take string or hash, if hash then assumed js tpl and handled differently
-
tpl = R8Tpl::TemplateR8.new("component/constraints",user_context())
-
# tpl.set_js_tpl_name("component_constraints")
-
# tpl_info = tpl.render()
-
# include_js_tpl(tpl_info[:src])
-
-
# include_css('component-editor')
-
# include_js('fields.r8')
-
-
# field_defs_json = JSON.generate(field_defs)
-
# run_javascript("R8.Fields.init(#{field_defs_json});")
-
-
return {
-
:content=>tpl.render(),
-
:panel=>request.params["panel_id"]
-
}
-
end
-
-
1
def get_cfg_file_contents(id)
-
component = get_object_by_id(id)
-
return {
-
:data=>component.get_config_file(request.params["file_asset_id"])
-
}
-
end
-
-
1
def add_cfg_file_from_upload(id)
-
component = get_object_by_id(id)
-
# redirect_route = request.params["redirect"]
-
# component_id = request.params["component_id"].to_i
-
-
upload_param = request.params["config_file"]
-
cfg_filename = upload_param[:filename]
-
tmp_file_handle = upload_param[:tempfile]
-
file_content = tmp_file_handle.read
-
tmp_file_handle.close
-
# TODO: need to clean up ways to get at and work with objects
-
# create_object_from_id,get_object_by_id,id_handle(id).create_object(), etc
-
# id_handle(id).create_object().add_config_file(cfg_filename,file_content)
-
component.add_config_file(cfg_filename,file_content)
-
# TODO: delete /tmp file File.unlink(tmp_file_path)
-
-
# pp [:test,id_handle(component_id).create_object().get_config_file(cfg_filename)]
-
-
=begin
-
pp tmp_file.path
-
new_path = R8::Config[:config_file_path]+'/'+cfg_filename
-
file_contents=IO.read(tmp_file.path)
-
-
File.open(new_path, 'w') do |f|
-
f.puts file_contents
-
end
-
=end
-
# redirect redirect_route
-
return {
-
:data=> {
-
:cfg_file_list=>component.get_config_files()
-
}
-
}
-
-
end
-
-
1
def upload_config()
-
redirect_route = request.params["redirect"]
-
component_id = request.params["component_id"].to_i
-
-
upload_param = request.params["config_file"]
-
cfg_filename = upload_param[:filename]
-
tmp_file_handle = upload_param[:tempfile]
-
file_content = tmp_file_handle.read
-
tmp_file_handle.close
-
id_handle(component_id).create_object().add_config_file(cfg_filename,file_content)
-
# TODO: delete /tmp file File.unlink(tmp_file_path)
-
-
# pp [:test,id_handle(component_id).create_object().get_config_file(cfg_filename)]
-
-
=begin
-
pp tmp_file.path
-
new_path = R8::Config[:config_file_path]+'/'+cfg_filename
-
file_contents=IO.read(tmp_file.path)
-
-
File.open(new_path, 'w') do |f|
-
f.puts file_contents
-
end
-
=end
-
redirect redirect_route
-
end
-
-
1
def config_templates(id)
-
# component = create_object_from_id(id,:component)
-
# constraints = component.get_constraints()
-
-
# TODO: retool include_js to take string or hash, if hash then assumed js tpl and handled differently
-
tpl = R8Tpl::TemplateR8.new("component/upload_config_file",user_context())
-
tpl.assign("component_id",id)
-
tpl.assign(:_app,app_common())
-
# tpl.set_js_tpl_name("component_constraints")
-
# tpl_info = tpl.render()
-
# include_js_tpl(tpl_info[:src])
-
-
# include_css('component-editor')
-
# include_js('fields.r8')
-
-
# field_defs_json = JSON.generate(field_defs)
-
# run_javascript("R8.Fields.init(#{field_defs_json});")
-
-
return {
-
:content=>tpl.render(),
-
:panel=>request.params["panel_id"]
-
}
-
end
-
-
1
def layout_test(id)
-
# assuming that request params has field type
-
# view_type = request.params["type"]||"wspace-edit" #TODO: stubbed with value wspace-edit
-
view_type = request.params["type"]||"dock_display" #TODO: stubbed with value dock_display
-
-
component = create_object_from_id(id,:component)
-
field_defs = component.get_field_def()
-
-
include_css(view_type)
-
tpl = R8Tpl::TemplateR8.new("component/#{view_type}_layout",user_context())
-
tpl.set_js_tpl_name("#{view_type}_layout")
-
-
js_tpl = tpl.render()
-
include_js_tpl(js_tpl[:src])
-
-
tpl = R8Tpl::TemplateR8.new("component/#{view_type}_group_popup",user_context())
-
tpl.set_js_tpl_name("#{view_type}_group_popup")
-
js_tpl = tpl.render()
-
include_js_tpl(js_tpl[:src])
-
-
tpl = R8Tpl::TemplateR8.new("component/layout_editor",user_context())
-
_model_var = {:i18n => get_model_i18n(model_name().to_s,user_context())}
-
tpl.assign(:_component,_model_var)
-
tpl.assign(:view_type,view_type)
-
tpl.assign(:field_def_list,field_defs)
-
-
view_type_list = [
-
{:type=>'wspace_edit',:i18n=>'Workspace Edit',:selected=>''},
-
{:type=>'dock_edit',:i18n=>'Dock Edit',:selected=>''},
-
{:type=>'dock_display',:i18n=>'Dock Display',:selected=>''}
-
]
-
view_type_list.each_with_index do |vt,i|
-
view_type_list[i][:selected] = (view_type_list[i][:type] == view_type) ? 'selected="true"' : ''
-
end
-
-
tpl.assign(:view_type_list,view_type_list)
-
-
include_css('layout-editor')
-
include_js("#{view_type}.layout_editor.r8")
-
-
layout_list = component.get_layouts(view_type)
-
# pp [:layout_list,layout_list]
-
-
tpl.assign(:layout_list,layout_list)
-
-
field_defs_json = JSON.generate(field_defs)
-
# layout_def_json = JSON.generate(layout_list[0][:def])
-
layout_json = JSON.generate(layout_list[0])
-
run_javascript("R8.LayoutEditor.init(#{layout_json},#{field_defs_json});")
-
-
return {
-
:content=>tpl.render(),
-
:panel=>request.params["panel_id"]
-
}
-
end
-
-
1
def save_layout(id)
-
hash = request.params
-
-
layout_info = {
-
:type => hash["type"]||"wspace-edit", #TODO: remove ||"wspace-edit" when value explicitly passed
-
:description => hash["description"]||"sample description", #TODO: remove ||"sample description"
-
:is_active => hash["is_active"] ? hash["is_active"] = "true" : true,
-
:def => JSON.parse(hash["def"])
-
}
-
-
component = create_object_from_id(id,:component)
-
component.add_layout(layout_info)
-
return {}
-
end
-
-
1
def publish_layout(id)
-
pp '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
-
pp request.params
-
-
return {}
-
end
-
-
##TODO ======= just for testing
-
=begin
-
def display(id,parsed_query_string=nil)
-
component = create_object_from_id(id)
-
template_name = component.save_view_in_cache?(:display,user_context())
-
tpl = R8Tpl::TemplateR8.new(template_name,user_context())
-
vals = component.get_virtual_object_attributes()
-
tpl.assign("component",vals)
-
return {:content => tpl.render()}
-
end
-
=end
-
1
def display(id,parsed_query_string=nil)
-
redirect "/xyz/component/details/#{id.to_s}"
-
end
-
-
1
def edit(id,parsed_query_string=nil)
-
component = create_object_from_id(id)
-
template_name = component.save_view_in_cache?(:edit,user_context())
-
tpl = R8Tpl::TemplateR8.new(template_name,user_context())
-
vals,ids = component.get_virtual_object_attributes(:ret_ids => true)
-
tpl.assign("component",vals)
-
tpl.assign("component_id",ids)
-
return {:content => tpl.render()}
-
end
-
-
1
def save(explicit_hash=nil)
-
attr_val_hash = explicit_hash || request.params.dup
-
# TODO: can thsi be handled another way
-
# convert empty strings to nils
-
attr_val_hash.each{|k,v|attr_val_hash[k] = nil if v.kind_of?(String) and v.empty?}
-
component_id = attr_val_hash.delete("id").to_i
-
attribute_rows = AttributeComplexType.ravel_raw_post_hash(attr_val_hash,:attribute,component_id)
-
attr_mh = ModelHandle.new(ret_session_context_id(),:attribute)
-
Attribute.update_and_propagate_attributes(attr_mh,attribute_rows)
-
redirect "/xyz/component/edit/#{component_id.to_s}"
-
end
-
=begin
-
def instance_edit_test(id,virtual_col_name=nil)
-
if virtual_col_name
-
id_handle = id_handle(id)
-
virtual_col_def = ((DB_REL_DEF[model_name]||{})[:virtual_columns]||{})[virtual_col_name.to_sym]
-
remote_col_info = (virtual_col_def||{})[:remote_dependencies]
-
raise Error.new("bad virtual_col_name #{virtual_col_name}") unless remote_col_info
-
dataset = SQL::DataSetSearchPattern.create_dataset_from_join_array(id_handle,remote_col_info)
-
pp [:debug,dataset.all]
-
end
-
-
component = create_object_from_id(id)
-
-
virtual_model_ref = id.to_s
-
tpl = R8Tpl::TemplateR8.create("component","edit",user_context(),virtual_model_ref,:meta_db)
-
vals,ids = component.get_virtual_object_attributes(:ret_ids => true)
-
tpl.assign("component",vals)
-
tpl.assign("component_id",ids)
-
-
return {:content => tpl.render()}
-
-
end
-
=end
-
1
def instance_edit_test(component_id)
-
component = create_object_from_id(component_id)
-
to_set = {}
-
attr_list_x = component.get_attributes_unraveled(to_set,:flatten_nil_value => true)
-
# TODO:" temp
-
attr_list = attr_list_x.map do |a|
-
disabled_info = {
-
:disabled_attribute => a[:is_readonly] ? "disabled" : "",
-
}
-
Aux::hash_subset(a,[:id,:name,:value,:i18n,:is_readonly]).merge(disabled_info)
-
end
-
-
# TODO strawman ordering; puts readonly at bottom
-
ordered_attr_list = attr_list.sort do |a,b|
-
if a[:disabled_attribute] == b[:disabled_attribute]
-
(a[:name]||"_") <=> (b[:name]||"_")
-
elsif a[:disabled_attribute].empty?
-
-1
-
else
-
1
-
end
-
end
-
-
tpl = R8Tpl::TemplateR8.new("component/component_edit",user_context())
-
tpl.assign("field_list",ordered_attr_list)
-
tpl.assign("component_id",component_id)
-
return {:content => tpl.render()}
-
end
-
-
1
def save_attributes_test(explicit_hash=nil)
-
attr_val_hash = explicit_hash || request.params.dup
-
# TODO: can thsi be handled another way
-
# convert empty strings to nils
-
attr_val_hash.each{|k,v|attr_val_hash[k] = nil if v.kind_of?(String) and v.empty?}
-
component_id = attr_val_hash.delete("component_id").to_i
-
component_idh = id_handle(component_id)
-
attr_mh = component_idh.create_childMH(:attribute)
-
attribute_rows = AttributeComplexType.ravel_raw_post_hash(attr_val_hash,:attribute,component_id)
-
# TODO: need way to mark which ones are instance vars vs which ones are defaults
-
Attribute.update_and_propagate_attributes(attr_mh,attribute_rows)
-
redirect "/xyz/component/instance_edit_test/#{component_id.to_s}"
-
end
-
-
####### end TODO for testing
-
-
1
def dock_edit(component_id)
-
component = create_object_from_id(component_id)
-
to_set = {}
-
attr_list_x = component.get_attributes_unraveled(to_set,:flatten_nil_value => true)
-
# TODO:" temp
-
attr_list = attr_list_x.map do |a|
-
disabled_info = {
-
:disabled_attribute => a[:is_readonly] ? "disabled" : "",
-
}
-
Aux::hash_subset(a,[:id,:name,:value,:i18n,:is_readonly]).merge(disabled_info)
-
end
-
-
# TODO strawman ordering; puts readonly at bottom
-
ordered_attr_list = attr_list.sort do |a,b|
-
if a[:disabled_attribute] == b[:disabled_attribute]
-
(a[:name]||"_") <=> (b[:name]||"_")
-
elsif a[:disabled_attribute].empty?
-
-1
-
else
-
1
-
end
-
end
-
-
tpl = R8Tpl::TemplateR8.new("dock/component_edit",user_context())
-
tpl.assign("field_list",ordered_attr_list)
-
tpl.assign("component_id",component_id)
-
return {:content => tpl.render()}
-
end
-
-
-
1
def dock_display(component_id)
-
component = create_object_from_id(component_id)
-
to_set = {}
-
attr_list = component.get_attributes_unraveled(to_set,:flatten_nil_value => true)
-
-
# TODO The ordering should not matter all that much since the views will be generated by the view defs
-
ordered_attr_list = attr_list.sort{|a,b|(a[:i18n]||"_") <=> (b[:i18n]||"_")}
-
-
tpl = R8Tpl::TemplateR8.new("component/dock_display",user_context())
-
tpl.assign("field_list",ordered_attr_list)
-
tpl.assign("component_id",component_id)
-
return {:content => tpl.render()}
-
end
-
-
# TODO: rename to save
-
1
def save_attributes(explicit_hash=nil)
-
attr_val_hash = explicit_hash || request.params.dup
-
# TODO: can thsi be handled another way
-
# convert empty strings to nils
-
attr_val_hash.each{|k,v|attr_val_hash[k] = nil if v.kind_of?(String) and v.empty?}
-
component_id = attr_val_hash.delete("component_id").to_i
-
attribute_rows = AttributeComplexType.ravel_raw_post_hash(attr_val_hash,:attribute,component_id)
-
# setting attr_mh this way so get a group id
-
attr_mh = id_handle(component_id).createMH(:attribute)
-
# TODO: need way to mark which ones are instance vars vs which ones are defaults
-
Attribute.update_and_propagate_attributes(attr_mh,attribute_rows)
-
redirect "/xyz/component/dock_edit/#{component_id.to_s}"
-
end
-
-
1
def testjsonlayout
-
tpl = R8Tpl::TemplateR8.new('component/testjson',user_context())
-
tpl.assign(:testing, 'Testing')
-
-
_model_var = {}
-
_model_var[:i18n] = get_model_i18n(model_name().to_s,user_context())
-
tpl.assign("_#{model_name().to_s}",_model_var)
-
-
return {:content => tpl.render()}
-
end
-
-
-
1
def testjshello
-
tpl = R8Tpl::TemplateR8.new('component/testjshello',user_context())
-
tpl.set_js_tpl_name('testjshello')
-
tpl.assign(:testing, 'Testing JSON Call Hello')
-
-
_model_var = {}
-
_model_var[:i18n] = get_model_i18n(model_name().to_s,user_context())
-
tpl.assign("_#{model_name().to_s}",_model_var)
-
-
return tpl.render()
-
end
-
-
1
def testjsgoodbye
-
tpl = R8Tpl::TemplateR8.new('component/testjsgoodbye',user_context())
-
tpl.set_js_tpl_name('testjsgoodbye')
-
tpl.assign(:testing, 'Testing JSON Call Goodbye')
-
-
_model_var = {}
-
_model_var[:i18n] = get_model_i18n(model_name().to_s,user_context())
-
tpl.assign("_#{model_name().to_s}",_model_var)
-
-
return tpl.render()
-
end
-
-
end
-
-
1
def add_assembly_items(id=nil)
-
# TODO: assuming parent_id is a datacenter_id
-
parent_id = request.params["parent_id"]
-
-
tpl = R8Tpl::TemplateR8.new("node/wspace_display",user_context())
-
tpl.set_js_tpl_name("node_wspace_display")
-
tpl_info = tpl.render()
-
include_js_tpl(tpl_info[:src])
-
-
# compute uui positions
-
assembly_left_pos = request.params["assembly_left_pos"]
-
-
node_list = get_objects(:node,{:assembly_id=>id})
-
-
dc_hash = get_object_by_id(parent_id,:datacenter)
-
raise Error.new("Not implemented when parent_id is not a datacenter") if dc_hash.nil?
-
# get the top most item in the list to set new positions
-
top_node = {}
-
top_most = 2000
-
-
node_list.each do |node|
-
ui = node.get_ui_info(dc_hash)
-
if ui and (ui[:top].to_i < top_most.to_i)
-
left_diff = assembly_left_pos.to_i - ui[:left].to_i
-
top_node = {:id=>node[:id],:ui=>ui,:left_diff=>left_diff}
-
top_most = ui[:top]
-
end
-
end
-
-
items = Array.new
-
item_id_list = Array.new
-
node_list.each do |node|
-
item_id_list << node[:id]
-
ui = node.get_ui_info(dc_hash)
-
Log.error("no coordinates for node with id #{node[:id].to_s} in #{parent_id.to_s}") unless ui
-
if ui
-
if node[:id] == top_node[:id]
-
ui[:left] = assembly_left_pos.to_i
-
else
-
ui[:left] = ui[:left].to_i + top_node[:left_diff].to_i
-
end
-
end
-
node.update_ui_info!(ui,dc_hash)
-
item = {
-
:type => 'node',
-
:object => node,
-
# :toolbar_def => toolbar_def,
-
:tpl_callback => tpl_info[:template_callback],
-
:ui => ui
-
}
-
items << item
-
end
-
-
# p_str = JSON.generate(request.params)
-
# run_javascript("alert('Added assembly, here are req params: #{p_str}');")
-
-
addItemsObj = JSON.generate(items)
-
run_javascript("R8.Workspace.addItems(#{addItemsObj});")
-
-
item_id_list_json = JSON.generate(item_id_list)
-
run_javascript("R8.Workspace.touchItems(#{item_id_list_json});")
-
-
return {}
-
end
-
-
1
def add_assembly_items_ide(id=nil)
-
# TODO: assuming parent_id is a datacenter_id
-
parent_id = request.params["parent_id"]
-
-
tpl = R8Tpl::TemplateR8.new("node/wspace_display_ide",user_context())
-
tpl.set_js_tpl_name("node_wspace_display_ide")
-
tpl_info = tpl.render()
-
include_js_tpl(tpl_info[:src])
-
-
# compute uui positions
-
assembly_left_pos = request.params["assembly_left_pos"]
-
-
node_list = get_objects(:node,{:assembly_id=>id})
-
-
dc_hash = get_object_by_id(parent_id,:datacenter)
-
raise Error.new("Not implemented when parent_id is not a datacenter") if dc_hash.nil?
-
# get the top most item in the list to set new positions
-
top_node = {}
-
top_most = 2000
-
-
node_list.each do |node|
-
ui = node.get_ui_info(dc_hash)
-
if ui and (ui[:top].to_i < top_most.to_i)
-
left_diff = assembly_left_pos.to_i - ui[:left].to_i
-
top_node = {:id=>node[:id],:ui=>ui,:left_diff=>left_diff}
-
top_most = ui[:top]
-
end
-
end
-
-
items = Array.new
-
item_id_list = Array.new
-
node_list.each do |node|
-
item_id_list << node[:id]
-
ui = node.get_ui_info(dc_hash)
-
Log.error("no coordinates for node with id #{node[:id].to_s} in #{parent_id.to_s}") unless ui
-
if ui
-
if node[:id] == top_node[:id]
-
ui[:left] = assembly_left_pos.to_i
-
else
-
ui[:left] = ui[:left].to_i + top_node[:left_diff].to_i
-
end
-
end
-
node.update_ui_info!(ui,dc_hash)
-
item = {
-
:type => 'node',
-
:object => node,
-
# :toolbar_def => toolbar_def,
-
:tpl_callback => tpl_info[:template_callback],
-
:ui => ui
-
}
-
items << item
-
end
-
-
# p_str = JSON.generate(request.params)
-
# run_javascript("alert('Added assembly, here are req params: #{p_str}');")
-
-
# addItemsObj = JSON.generate(items)
-
# run_javascript("R8.Workspace.addItems(#{addItemsObj});")
-
-
# item_id_list_json = JSON.generate(item_id_list)
-
# run_javascript("R8.Workspace.touchItems(#{item_id_list_json});")
-
-
ret_obj = Hash.new
-
ret_obj[:items] = items
-
ret_obj[:touch_items] = item_id_list
-
-
return {:data=>ret_obj}
-
end
-
-
end
-
1
require 'eventmachine'
-
-
1
module DTK
-
1
class Component_moduleController < AuthController
-
1
helper :module_helper
-
1
helper :remotes_helper
-
-
1
def rest__test_generate_dsl()
-
component_module = create_obj(:component_module_id)
-
dsl_created_info = component_module.test_generate_dsl()
-
STDOUT << dsl_created_info[:content] << "\n"
-
rest_ok_response
-
end
-
-
#### create and delete actions ###
-
1
def rest__create()
-
# setup needed data
-
module_name = ret_non_null_request_params(:module_name)
-
namespace = ret_request_param_module_namespace?()
-
config_agent_type = ret_config_agent_type()
-
project = get_default_project()
-
-
# local_params encapsulates local module branch params
-
opts_local_params = (namespace ? { :namespace => namespace } : {})
-
local_params = local_params(:component_module,module_name,opts_local_params)
-
-
opts_create_mod = Opts.new(
-
:config_agent_type => ret_config_agent_type()
-
)
-
module_repo_info = ComponentModule.create_module(project,local_params,opts_create_mod)[:module_repo_info]
-
-
# only when creating via import-git command
-
git_url = ret_request_params(:module_git_url)
-
unless git_url.empty?
-
add_git_url(project.model_handle(:repo_remote), module_repo_info[:repo_id], git_url)
-
end
-
-
rest_ok_response module_repo_info
-
end
-
-
1
def rest__update_from_initial_create()
-
component_module = create_obj(:component_module_id)
-
repo_id,commit_sha = ret_non_null_request_params(:repo_id,:commit_sha)
-
git_import = ret_request_params(:git_import)
-
repo_idh = id_handle(repo_id,:repo)
-
version = ret_version()
-
scaffold = ret_request_params(:scaffold_if_no_dsl)
-
opts = {:scaffold_if_no_dsl => scaffold, :do_not_raise => true, :process_provider_specific_dependencies => true}
-
opts.merge!(:commit_dsl => true) if ret_request_params(:commit_dsl)
-
-
response =
-
if git_import
-
component_module.import_from_git(commit_sha,repo_idh,version,opts)
-
else
-
component_module.import_from_file(commit_sha,repo_idh,version,opts)
-
end
-
-
rest_ok_response response
-
end
-
-
1
def rest__update_model_from_clone()
-
component_module = create_obj(:component_module_id)
-
commit_sha = ret_non_null_request_params(:commit_sha)
-
version = ret_version()
-
diffs_summary = ret_diffs_summary()
-
-
opts = Hash.new
-
if ret_request_param_boolean(:internal_trigger)
-
opts.merge!(:do_not_raise => true)
-
end
-
if ret_request_param_boolean(:force_parse)
-
opts.merge!(:force_parse=> true)
-
end
-
if ret_request_params(:set_parsed_false)
-
opts.merge!(:dsl_parsed_false => true)
-
end
-
if ret_request_params(:update_from_includes)
-
opts.merge!(:update_from_includes => true)
-
end
-
if ret_request_params(:service_instance_module)
-
opts.merge!(:service_instance_module => true)
-
end
-
if current_branch_sha = ret_request_params(:current_branch_sha)
-
opts.merge!(:current_branch_sha => current_branch_sha)
-
end
-
if force = ret_request_params(:force)
-
opts.merge!(:force => force)
-
end
-
-
# the possible keys in response are with the subkeys that are used
-
# :dsl_parse_error: ModuleDSL::ParsingError obj
-
# :dsl_updated_info:
-
# :msg
-
# :commit_sha
-
# :dsl_created_info
-
# :path
-
# :content - only if want this dsl file to be added on cleint side
-
# :external_dependencies
-
# :inconsistent
-
# :possibly_missing
-
# :ambiguous
-
rest_ok_response component_module.update_model_from_clone_changes?(commit_sha,diffs_summary,version,opts)
-
end
-
-
1
# Deletes the component module identified by :component_module_id.
def rest__delete()
  deleted_module_info = create_obj(:component_module_id).delete_object()
  rest_ok_response deleted_module_info
end
-
-
1
# Deletes a single version of the component module.
def rest__delete_version()
  cmp_module = create_obj(:component_module_id)
  rest_ok_response cmp_module.delete_version(ret_version())
end
-
-
#### end: create and delete actions ###
-
-
#### list and info actions ###
-
1
# Lists component modules in the default project; when a :diff is requested
# the response datatype switches to :module_diff.
def rest__list()
  Log.info(MessageQueue.object_id)
  project          = get_default_project()
  diff             = ret_request_params(:diff)
  namespace        = ret_request_params(:module_namespace)
  remote_repo_base = ret_remote_repo_base()

  list_opts = Opts.new(:project_idh => project.id_handle())
  if detail = ret_request_params(:detail_to_include)
    list_opts.merge!(:detail_to_include => detail.map { |d| d.to_sym })
  end
  list_opts.merge!(:remote_repo_base => remote_repo_base, :diff => diff, :namespace => namespace)

  # rest_ok_response filter_by_namespace(ComponentModule.list(opts)), :datatype => datatype
  rest_ok_response ComponentModule.list(list_opts), :datatype => (diff ? :module_diff : :module)
end
-
-
1
# Returns workspace branch info for the module at the requested version.
def rest__get_workspace_branch_info()
  cmp_module = create_obj(:component_module_id)
  rest_ok_response cmp_module.get_workspace_branch_info(ret_version())
end
-
-
1
# Returns info for a component module; the module id param is optional.
def rest__info()
  module_id = ret_request_param_id_optional(:component_module_id, ::DTK::ComponentModule)
  info_opts = Opts.new(:project_idh => get_default_project().id_handle())
  rest_ok_response ComponentModule.info(model_handle(), module_id, info_opts)
end
-
-
1
# Lists diffs between the local module and its remote counterpart.
def rest__list_remote_diffs()
  cmp_module = create_obj(:component_module_id)
  version = nil # version selection not wired up for this action
  rest_ok_response cmp_module.list_remote_diffs(version)
end
-
-
#
-
# Method will check new dependencies on repo manager and report missing dependencies.
-
# Response will return list of modules for given component.
-
#
-
1
# Checks new dependencies on the repo manager before a pull and reports
# any missing modules for the component.
def rest__resolve_pull_from_remote()
  resolution = resolve_pull_from_remote(:component_module)
  rest_ok_response resolution
end
-
-
1
# Pulls the module's content from its remote repo.
def rest__pull_from_remote()
  pull_result = pull_from_remote_helper(ComponentModule)
  rest_ok_response pull_result
end
-
-
1
# Changes permissions on the remote module and returns the helper's response.
def rest__remote_chmod()
  rest_ok_response(chmod_from_remote_helper())
end
-
-
1
# Changes ownership on the remote module; responds with an empty ok envelope.
def rest__remote_chown()
  chown_from_remote_helper()
  rest_ok_response
end
-
-
1
# Confirms making the module public and returns the helper's result.
def rest__confirm_make_public()
  confirmation = confirm_make_public_helper()
  rest_ok_response confirmation
end
-
-
1
# Applies a collaboration change on the remote; empty ok response.
def rest__remote_collaboration()
  collaboration_from_remote_helper()
  rest_ok_response
end
-
-
1
# Lists collaboration settings from the remote.
def rest__list_remote_collaboration()
  rest_ok_response list_collaboration_from_remote_helper()
end
-
-
1
# Returns the module's local and remote versions; the client's RSA public
# key is forwarded for remote access.
def rest__versions()
  cmp_module  = create_obj(:component_module_id)
  rsa_pub_key = ret_request_params(:rsa_pub_key)
  version_opts = Opts.new(:project_idh => get_default_project().id_handle())
  rest_ok_response cmp_module.local_and_remote_versions(rsa_pub_key, version_opts)
end
-
-
1
# Returns info about one facet of the module; :about must be one of AboutEnum.
def rest__info_about()
  cmp_module = create_obj(:component_module_id)
  about = ret_non_null_request_params(:about).to_sym
  component_template_id = ret_request_params(:component_template_id)
  raise ErrorUsage::BadParamValue.new(:about,AboutEnum) unless AboutEnum.include?(about)
  rest_ok_response cmp_module.info_about(about, component_template_id)
end

AboutEnum = [:components, :attributes, :instances]
-
-
#### end: list and info actions ###
-
-
#### actions to interact with remote repos ###
-
# TODO: rename; this is just called by install; import ops call create route
-
1
# Installs the module from dtkn (invoked by the client's install command;
# import operations go through the create route instead).
def rest__import()
  rest_ok_response install_from_dtkn_helper(:component_module)
end
-
-
# TODO: rename; this is just called by publish
-
1
# Publishes the module to dtkn (invoked by the client's publish command).
def rest__export()
  cmp_module = create_obj(:component_module_id)
  rest_ok_response publish_to_dtkn_helper(cmp_module)
end
-
-
1
# Installs a module from the Puppet Forge: downloads it into a temp
# directory via the forge client, imports/parses it into a component
# module, and always cleans up the temp install directory.
#
# Raises (via if_module_exists!) when the module already exists locally.
def rest__install_puppet_forge_modules()
  pf_full_name = ret_non_null_request_params(:puppetf_module_name)
  namespace,module_name = ret_namespace_and_module_name_for_puppet_forge(pf_full_name)
  puppet_version = ret_request_params_force_nil(:puppet_version)
  project = get_default_project()

  # will raise exception if exists
  ComponentModule.if_module_exists!(project.id_handle(), module_name, namespace,
    "Cannot install '#{namespace}:#{module_name}' since it already exists!"
  )

  puppet_forge_local_copy = nil
  install_info = Hash.new

  begin
    # will raise an exception in case of error
    # This creates a temporary directory after using puppet forge client to import
    MessageQueue.store(:info, "Started puppet forge install of module '#{pf_full_name}' ...")
    puppet_forge_local_copy = PuppetForge::Client.install(pf_full_name, puppet_version)
    # BUGFIX: the second opts assignment used to clobber the first, silently
    # dropping :config_agent_type; merge the namespace in instead
    opts = {:config_agent_type => ret_config_agent_type()}
    opts.merge!(:base_namespace => namespace) if namespace
    MessageQueue.store(:info, "Puppet forge module installed, parsing content ...")
    install_info = ComponentModule.import_from_puppet_forge(project, puppet_forge_local_copy, opts)
  ensure
    # remove the temp download dir even when the import raised
    puppet_forge_local_copy.delete_base_install_dir?() if puppet_forge_local_copy
  end
  rest_ok_response install_info
end
-
-
# this should be called when the module is linked, but the specfic version is not
-
1
# Imports a specific version of an already-linked module from its remote repo.
def rest__import_version()
  cmp_module  = create_obj(:component_module_id)
  remote_repo = ret_remote_repo()
  rest_ok_response cmp_module.import_version(remote_repo,ret_version())
end
-
-
# TODO: ModuleBranch::Location: harmonize this signature with one for service module
-
1
# TODO: ModuleBranch::Location: harmonize this signature with one for service module
# Deletes a module on the remote (dtkn); responds with an empty ok envelope.
def rest__delete_remote()
  client_rsa_pub_key = ret_request_params(:rsa_pub_key)
  remote_namespace = ret_request_params(:remote_module_namespace)
  force_delete = ret_request_param_boolean(:force_delete)

  opts = Hash.new
  # BUGFIX: guard against a missing param — the previous bare
  # `remote_namespace.empty?` raised NoMethodError when the param was nil
  opts.merge!(:namespace => remote_namespace) unless remote_namespace.nil? || remote_namespace.empty?

  remote_namespace, remote_module_name = Repo::Remote::split_qualified_name(ret_non_null_request_params(:remote_module_name), opts)
  remote_params = remote_params_dtkn(:component_module, remote_namespace, remote_module_name)

  project = get_default_project()
  ComponentModule.delete_remote(project, remote_params, client_rsa_pub_key, force_delete)

  rest_ok_response
end
-
-
1
# Lists modules available on the remote, filtered down to the caller's namespaces.
def rest__list_remote()
  remote_modules = ComponentModule.list_remotes(model_handle, ret_request_params(:rsa_pub_key))
  rest_ok_response filter_by_namespace(remote_modules), :datatype => :module_remote
end
-
-
# get remote_module_info; throws an access rights usage error if user does not have access
-
1
# Returns remote module info; the helper raises an access-rights usage
# error if the user lacks access.
def rest__get_remote_module_info()
  cmp_module = create_obj(:component_module_id)
  rest_ok_response get_remote_module_info_helper(cmp_module)
end
-
-
#### end: actions to interact with remote repo ###
-
-
#### actions to manage workspace
-
-
1
# Creates a new version of the module; empty ok response.
def rest__create_new_version()
  create_obj(:component_module_id).create_new_version(ret_version())
  rest_ok_response
end
-
-
1
# Upgrades the module to a new DSL version (always json format); empty ok response.
def rest__create_new_dsl_version()
  cmp_module     = create_obj(:component_module_id)
  dsl_version    = ret_non_null_request_params(:dsl_version).to_i
  module_version = ret_version()
  cmp_module.create_new_dsl_version(dsl_version,:json,module_version)
  rest_ok_response
end
-
-
#### end: actions to manage workspace and promote changes from workspace to library ###
-
-
1
# Pushes the module to the given mirror host.
# FIX: wrap the result in the standard rest envelope — this was the only
# rest__ action in the controller returning a raw value.
def rest__push_to_mirror()
  component_module = create_obj(:component_module_id)
  mirror_host = ret_non_null_request_params(:mirror_host)
  rest_ok_response component_module.push_to_mirror(mirror_host)
end
-
-
1
# Shows the module's configured git remotes via the shared helper.
def rest__info_git_remote()
  info_git_remote(create_obj(:component_module_id))
end
-
-
1
# Adds a git remote to the module via the shared helper.
def rest__add_git_remote()
  add_git_remote(create_obj(:component_module_id))
end
-
-
1
# Removes a git remote from the module via the shared helper.
def rest__remove_git_remote()
  remove_git_remote(create_obj(:component_module_id))
end
-
-
end
-
end
-
1
module XYZ
  # Controller for datacenter ("target"/"environment") workspace views.
  class DatacenterController < AuthController
    # Creates a Target with the given name in the current session context
    # and returns a confirmation string.
    def create(name)
      c = ret_session_context_id()
      Target.create(name,c)
      "target created with name #{name}"
    end

    # Returns warning-level violation notifications for the datacenter,
    # tagging each entry with :type => "warning".
    def get_warnings(id)
      datacenter = get_object_by_id(id)
      notification_list = datacenter.get_violation_info("warning")
      notification_list.each_with_index do |n,index|
        notification_list[index][:type] = "warning"
      end
      # DEBUG
      # pp [:warnings,notification_list]
      return {:data=>notification_list}
    end

    # Builds the datacenter "view space": renders the client-side templates,
    # includes the needed js plugins, assembles the item list from the
    # datacenter's contents, and pushes the whole structure to the browser
    # via R8.Workspace.pushViewSpace.
    def load_vspace(datacenter_id)
      datacenter = id_handle(datacenter_id,:datacenter).create_object()
      datacenter_id = datacenter.id()

      # TODO: how to retrieve fields from instance?
      dc_hash = get_object_by_id(datacenter_id,:datacenter)

      # TODO: revisit when cleaning up toolbar, plugins and user settings
      tpl = R8Tpl::TemplateR8.new("workspace/notification_list",user_context())
      tpl.set_js_tpl_name("notification_list")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      include_js('plugins/search.cmdhandler')
      view_space = {
        :type => 'datacenter',
        :i18n => 'Environments',
        :object => dc_hash
      }
      # v_space_obj = JSON.generate(view_space)
      # run_javascript("R8.Workspace.pushViewSpace(#{v_space_obj});")

      #--------Setup Toolbar for access each group from ACL's---------
      # add_js_exe("R8.Toolbar.init({node:'group-#{model_list[0][:id]}',tools:['quicksearch']});")
      # NOTE(review): hard-coded true and never read below — ACL check not implemented yet
      user_has_toolbar_access = true
      user_group_tool_list = Array.new
      user_group_tool_list << 'quicksearch'
      toolbar_def = {:tools => user_group_tool_list}

      include_js('toolbar.quicksearch.r8')

      # rendered display templates keyed by item type (:node_group/:node/:monitor)
      tpl_info_hash = Hash.new

      tpl = R8Tpl::TemplateR8.new("node_group/wspace_display",user_context())
      tpl.set_js_tpl_name("ng_wspace_display")
      tpl_info_hash[:node_group] = tpl.render()
      include_js_tpl(tpl_info_hash[:node_group][:src])

      tpl = R8Tpl::TemplateR8.new("node/wspace_display",user_context())
      tpl.set_js_tpl_name("node_wspace_display")
      tpl_info_hash[:node] = tpl.render()
      include_js_tpl(tpl_info_hash[:node][:src])

      tpl = R8Tpl::TemplateR8.new("datacenter/wspace_monitor_display",user_context())
      tpl.set_js_tpl_name("wspace_monitor_display")
      tpl_info_hash[:monitor] = tpl.render()
      include_js_tpl(tpl_info_hash[:monitor][:src])

      ##### ----------------- add in model info
      model_list = datacenter.get_items()

      items = model_list.map do |object|
        object_id_sym = object.id.to_s.to_sym
        # ui settings: prefer the datacenter's per-item ui entry, fall back to
        # the object's own ui keyed by the datacenter id
        ui = ((dc_hash[:ui]||{})[:items]||{})[object_id_sym] || (object[:ui]||{})[datacenter_id.to_s.to_sym]

        # items whose comma-separated display_name contains "monitor" use the monitor template
        obj_tags = object[:display_name].split(',')
        model_name = object.model_name
        type = (obj_tags.include?("monitor")) ? :monitor : model_name
        {
          :type => type.to_s,
          :model => model_name.to_s,
          :object => object,
          :toolbar_def => toolbar_def,
          :tpl_callback => tpl_info_hash[type][:template_callback],
          :ui => ui,
          :tags => obj_tags
        }
      end
      view_space[:items] = items
      view_space_json = JSON.generate(view_space)
      run_javascript("R8.Workspace.pushViewSpace(#{view_space_json});")

      #---------------------------------------------

      return {
        :content => '',
        :panel => 'viewspace'
      }
    end

    # Persists client-side ui layout changes (the JSON "ui" request param)
    # onto the datacenter model.
    def update_vspace_ui(id)
      # TODO: not used dc_hash = get_object_by_id(id,:datacenter)
      # pp '*******UPDATING VSPACE UI***************'
      # pp request.params
      update_from_hash(id,{:ui=>JSON.parse(request.params["ui"])})
      # update_from_hash(id,{:ui=>JSON.parse(request.params["ui"])}) if request.params["ui"].kind_of?(Hash)
      return {}
    end

    # Adds a model item to the target, stores the item's ui settings on the
    # datacenter, then redirects — honoring the *_redirect request params
    # when present, otherwise to this model's display page.
    def add_item(id)
      # TODO: need to copy in avatar when hash["ui"] is non null
      target = id_handle(id).create_object()

      override_attrs = request.params["ui"] ? {:ui=>request.params["ui"]} : {}

      model_id_handle = id_handle(request.params["id"].to_i,request.params["model"].to_sym)
      new_item_id = target.add_item(model_id_handle,override_attrs)
      # id = new_id if new_id

      # TODO: how do we get field info from model instance?
      dc_hash = get_object_by_id(id,:datacenter)
      dc_ui = dc_hash[:ui].nil? ? {:items=>{}} : dc_hash[:ui]
      # TODO: cleanup later, right now ui req param indexed by dc id from old style
      ui_params = JSON.parse(request.params["ui"])
      dc_ui[:items][new_item_id.to_s.to_sym] = ui_params[id.to_s]
      # TODO: any way to update a model from its object once an instance is created?
      update_from_hash(id,{:ui=>dc_ui})

      # TODO: clean this up,hack to update UI params for newly cloned object
      # update_from_hash(id,{:ui=>hash["ui"]})

      # hash["redirect"] ? redirect_route = "/xyz/#{hash["redirect"]}/#{id.to_s}" : redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"

      if request.params["model_redirect"]
        base_redirect = "/xyz/#{request.params["model_redirect"]}/#{request.params["action_redirect"]}"
        # an "id_redirect" starting with '*' means "use the id of the item just added"
        redirect_id = request.params["id_redirect"].match(/^\*/) ? new_item_id.to_s : request.params["id_redirect"]
        redirect_route = "#{base_redirect}/#{redirect_id}"
        # forward all extra request params (minus the redirect bookkeeping ones)
        # on the query string
        request_params = ''
        expected_params = ['model_redirect','action_redirect','id_redirect','target_id','target_model_name']
        request.params.each do |name,value|
          if !expected_params.include?(name)
            request_params << '&' if request_params != ''
            request_params << "#{name}=#{value}"
          end
        end
        ajax_request? ? redirect_route += '.json' : nil
        redirect_route << URI.encode("?#{request_params}") if request_params != ''
      else
        redirect_route = "/xyz/#{model_name()}/display/#{new_item_id.to_s}"
        ajax_request? ? redirect_route += '.json' : nil
      end

      redirect redirect_route
    end

    # TODO: is this deprecated
    # Returns component_external port links among the items given in the
    # "item_list" JSON request param.
    def get_links(id)
      datacenter = id_handle(id,:datacenter).create_object()
      item_list = JSON.parse(request.params["item_list"])
      # NOTE(review): the block's value is whatever `Log.error(...) unless ...`
      # returns, so whether malformed items are actually rejected depends on
      # Log.error's return value — confirm intent
      item_list = item_list.reject do |x|
        Log.error("get links missing needed params") unless x["id"] and x["model"]
      end
      # TODO: move this call into underlying get_links call,
      item_list = item_list.map{|x|id_handle(x["id"].to_i,x["model"].to_sym)}
      # TODO: make get_links an instance method, should pull all links from children if item_list is []/nil
      link_list = datacenter.class.get_port_links(item_list,"component_external")
      return {'data'=>link_list}
    end

    # Renders the "create target" edit form into the requested panel.
    def wspace_edit
      tpl = R8Tpl::TemplateR8.new("datacenter/wspace_edit",user_context())
      tpl.assign(:_app,app_common())
      tpl.assign(:submit_label,"Create Target")
      panel_id = request.params['panel_id']

      include_js('plugins/environment.tool')
      # run_javascript('setTimeout(initUserForm,500);')
      run_javascript('R8.EnvironmentTool.init();')

      return {
        :content=> tpl.render(),
        :panel=>panel_id
      }
    end

  end
end
-
1
r8_require("../model/developer")
-
1
module XYZ
  # Developer-only endpoints (agent injection onto nodes).
  class DeveloperController < AuthController
    include DeveloperMixin
    helper :node_helper

    # Kicks off agent injection on the nodes named in the request and
    # returns the id of the results queue the client can poll.
    def rest__inject_agent()
      params = ret_params_hash(:agent_files, :node_pattern, :node_list)
      node_list = params[:node_list]||[]
      nodes = get_nodes_from_params(node_list)
      queue = ActionResultsQueue.new

      DeveloperMixin.initiate_inject_agent(queue, nodes, params)
      rest_ok_response :action_results_id => queue.id
    end

    # Resolves each id in node_list to its node object(s) and returns a flat
    # array. (Simplified: the original flattened inside the accumulation loop
    # and then flattened the accumulator again; a single flat_map + flatten
    # yields the same result.)
    def get_nodes_from_params(node_list)
      node_list.flat_map { |n| get_objects(:node, { :id => n.to_i }) }.flatten
    end

  end
end
-
-
-
1
module XYZ
  # Minimal file editor backend serving a fixed set of demo files.
  class EditorController < AuthController

    def index
      return {:data=>''}
    end

    # Loads one of a fixed set of demo files by string id ("1".."4") and
    # returns its contents under :data.
    # FIX: an unknown id used to surface as a TypeError from `String + nil`;
    # raise a clear error instead.
    def load_file(id)
      files = {
        "1" => 'apache2/templates/default/apache2.conf.erb',
        "2" => 'apache2/metadata.json',
        "3" => 'apache2/metadata.rb',
        "4" => 'apache2/recipes/default.rb',
      }
      relative_path = files[id]
      raise Error.new("unknown editor file id '#{id}'") unless relative_path
      file_path = R8::Config[:editor_file_path]+'/'+relative_path
      file_contents = IO.read(file_path)

      return {:data => file_contents}
    end
  end
end
-
1
module XYZ
  # CRUD-ish endpoints for file assets (files stored under implementation repos).
  class File_assetController < AuthController
    helper :i18n_string_mapping

    # Fetches a file asset by id and returns its metadata plus content under :data.
    def get(id)
      file_asset = get_object_by_id(id)
      file_asset[:name] = file_asset[:file_name]

      file_asset[:content] = file_asset.get_content()
      # fall back to a sentinel string rather than nil so the UI has something to show
      file_asset[:content] ||= 'ERROR RETRIEVING CONTENT'
=begin TODO FOR debugging
      file_asset = {}
      file_asset[:content] = 'this is some stubbed file content to return something---'+id.to_s
=end
      return {:data=>file_asset}
    end

    # Persists edited content for the file id given in the request params.
    # Raises when no "editor_file_id" param was sent.
    def save_content()
      # file_asset = get_object_by_id(id)
      # file_asset.update_content(request.params["content"])
      raise Error.new("no file id given") unless request.params["editor_file_id"]
      file_asset = get_object_by_id(request.params["editor_file_id"])
      file_asset.update_content(request.params["editor_file_content"])

      return {:data=>{}}
    end

    # Debug endpoint: looks up a file asset addressed as "<repo>/<path>",
    # dumps it to stdout, and returns its content wrapped in <pre>.
    def test_get(*path_array)
      path = path_array.join("/")
      # splits "repo/rest/of/path" into [repo, remainder] via the $1/$2 match globals
      repo,af_path = (path =~ Regexp.new("(^[^/]+)/(.+$)"); [$1,$2])
      sp_hash = {
        :filter => [:eq, :path, af_path],
        :cols => [:id,:path,:implementation_info]
      }
      mh = ModelHandle.new(ret_session_context_id(),:file_asset)
      # disambiguate assets sharing a path by matching the repo name
      file_asset = Model.get_objects_from_sp_hash(mh,sp_hash).find{|x|x[:implementation][:repo] == repo}
      raise "file asset #{path} not found" unless file_asset
      pp file_asset
      contents = file_asset.get_content()
      contents.each_line{|l|STDOUT << l}
      STDOUT << "\n"
      {:content=>"<pre>#{contents}</pre>"}
    end

    # Debug endpoint: registers a new asset file path under an existing
    # implementation repo addressed as "<repo>/<path>".
    def test_add(*path_array)
      path = path_array.join("/")
      repo,af_path = (path =~ Regexp.new("(^[^/]+)/(.+$)"); [$1,$2])
      sp_hash = {
        :filter => [:eq, :ref, repo],
        :cols => [:id,:type]
      }
      mh = ModelHandle.new(ret_session_context_id(),:implementation)
      impl = Model.get_objects_from_sp_hash(mh,sp_hash).first
      raise "implementation #{repo} not found" unless impl
      impl.add_asset_file(af_path)
      {:content => nil}
    end
  end
end
-
1
module XYZ
  # Controller that assembles the browser IDE: project trees, client-side
  # templates, and js plugin wiring.
  class IdeController < Controller

    # Builds the IDE landing page: loads all projects (each with its target
    # and implementation trees), registers every client-side template/plugin
    # the IDE needs, and boots R8.IDE with the project data.
    def index()
      projects = Project.get_all(model_handle(:project))
      pp [:projects,projects]

      projects.each_with_index { |p,i|
        projects[i][:tree] = {}
        projects[i][:tree][:targets] = p.get_target_tree()
        projects[i][:tree][:implementations] = p.get_module_tree(:include_file_assets => true)
        projects[i][:name] = projects[i][:display_name]
      }
      tpl = R8Tpl::TemplateR8.new("ide/project_tree_leaf",user_context())
      tpl.set_js_tpl_name("project_tree_leaf")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("ide/l_panel",user_context())
      tpl.set_js_tpl_name("l_panel")
      # tpl = R8Tpl::TemplateR8.new("ide/panel_frame",user_context())
      # tpl.set_js_tpl_name("ide_panel_frame")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("ide/editor_panel",user_context())
      tpl.set_js_tpl_name("editor_panel")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      #==========================
      # Include target specific js that will be needed
      # TODO: move out of here eventually
      tpl_info_hash = Hash.new

      tpl = R8Tpl::TemplateR8.new("node_group/wspace_display",user_context())
      tpl.set_js_tpl_name("ng_wspace_display")
      tpl_info_hash[:node_group] = tpl.render()
      include_js_tpl(tpl_info_hash[:node_group][:src])

      tpl = R8Tpl::TemplateR8.new("node/wspace_display_ide",user_context())
      tpl.set_js_tpl_name("node_wspace_display_ide")
      tpl_info_hash[:node] = tpl.render()
      include_js_tpl(tpl_info_hash[:node][:src])

      tpl = R8Tpl::TemplateR8.new("datacenter/wspace_monitor_display",user_context())
      tpl.set_js_tpl_name("wspace_monitor_display")
      tpl_info_hash[:monitor] = tpl.render()
      include_js_tpl(tpl_info_hash[:monitor][:src])

      tpl = R8Tpl::TemplateR8.new("workspace/notification_list_ide",user_context())
      tpl.set_js_tpl_name("notification_list_ide")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("component/library_search",user_context())
      tpl.set_js_tpl_name("component_library_search")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("node/library_search",user_context())
      tpl.set_js_tpl_name("node_library_search")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("assembly/library_search",user_context())
      tpl.set_js_tpl_name("assembly_library_search")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])
      #==========================

      # include_js('plugins/search.cmdhandler2')
      include_js('plugins/r8.cmdbar.assemblies')
      include_js('plugins/r8.cmdbar.components')
      include_js('plugins/r8.cmdbar.nodes')
      include_js('plugins/r8.cmdbar.tasks')

      projects_json = JSON.generate(projects)
      # TODO: figure out why this user init isnt firing inside of bundle and return
      # DEBUG
      run_javascript("R8.User.init();")
      run_javascript("R8.IDE.init(#{projects_json});")

      # run_javascript("R8.IDE.addProjects(#{projects_json});")

      # tpl = R8Tpl::TemplateR8.new("ide/test_tree2",user_context())
      # run_javascript("R8.IDE.testTree();")

      # NOTE(review): `tpl` at this point is the LAST template created above
      # (assembly/library_search) — confirm that is the intended panel content
      return {:content=>tpl.render(),:panel=>'project_panel'}

      # return {:content=>''}
    end

    # Renders the "new project" form. The targets list is stubbed demo data.
    # NOTE(review): the first `targets = Array.new` is immediately overwritten.
    def new_project()
      tpl = R8Tpl::TemplateR8.new("ide/new_project",user_context())
      tpl.assign(:_app,app_common())
      # tpl.assign(:required_attr_list,required_attr_list)

      targets = Array.new
      targets = [{
        :id => '234sadf',
        :name => 'AWS 1 - East Region'
      },
      {
        :id => '234sadf',
        :name => 'AWS 1 - West Region'
      },
      {
        :id => '234sadf',
        :name => 'AWS 1 - EU West Region'
      },
      {
        :id => '234sadf',
        :name => 'AWS 1 - Asia (Singapore) Region'
      },
      {
        :id => '234sadf',
        :name => 'AWS 1 - Asia (Japan) Region'
      }
      ]
      tpl.assign(:targets,targets)

      panel_id = request.params['panel_id']

      include_js('plugins/create_project.tool')

      run_javascript("R8.CreateProjectTool.init();")
      # run_javascript("R8.IDE.initCreateProject();")
      # run_javascript("R8.CommitTool2.renderTree(#{commit_tree_json},'edit','change-list-tab-content');")

      return {
        :content=> tpl.render(),
        :panel=>panel_id
      }
    end

    # Renders the "new target" form; mirrors new_project() with the same
    # stubbed targets list but the create-target template/plugin.
    def new_target()
      tpl = R8Tpl::TemplateR8.new("ide/new_target",user_context())
      tpl.assign(:_app,app_common())
      # tpl.assign(:required_attr_list,required_attr_list)

      targets = Array.new
      targets = [{
        :id => '234sadf',
        :name => 'AWS 1 - East Region'
      },
      {
        :id => '234sadf',
        :name => 'AWS 1 - West Region'
      },
      {
        :id => '234sadf',
        :name => 'AWS 1 - EU West Region'
      },
      {
        :id => '234sadf',
        :name => 'AWS 1 - Asia (Singapore) Region'
      },
      {
        :id => '234sadf',
        :name => 'AWS 1 - Asia (Japan) Region'
      }
      ]
      tpl.assign(:targets,targets)

      panel_id = request.params['panel_id']

      include_js('plugins/create_target.tool')

      run_javascript("R8.CreateTargetTool.init();")
      # run_javascript("R8.IDE.initCreateProject();")
      # run_javascript("R8.CommitTool2.renderTree(#{commit_tree_json},'edit','change-list-tab-content');")

      return {
        :content=> tpl.render(),
        :panel=>panel_id
      }
    end

    # Dev scratch action for exercising IDE tree rendering.
    def test_tree()
      # tpl = R8Tpl::TemplateR8.new("ide/test_tree",user_context())

      run_javascript("R8.IDE.init();")
      run_javascript("R8.IDE.testTree();")
      # return {:content=>tpl.render(),:panel=>'editor-panel'}
      return {:content=>'this is garb!!!!',:panel=>'editor-panel'}
    end
  end

end
-
1
module XYZ
  # Endpoints operating on implementation objects (module repos).
  class ImplementationController < AuthController
    # TODO: see what to keep
    ###TODO: for testing

    # Deletes all repos and implementations belonging to the named module.
    def delete_module(module_name)
      Implementation.delete_repos_and_implementations(model_handle,module_name)
      {:content => {}}
    end

    ###################
    # Replaces the library implementation with the project implementation it
    # derives from.
    def replace_library_implementation(proj_impl_id)
      create_object_from_id(proj_impl_id).replace_library_impl_with_proj_impl()
      return {:content => {}}
    end

    # Returns the module file tree (including file assets) for an
    # implementation id. Accepts either a project implementation id or a
    # library ancestor id; the latter is resolved to its project
    # implementation via the ancestor_id lookup below.
    def get_tree(implementation_id)
      # TODO: should be passed proj_impl_id; below is hack to set if it is given libary ancesor
      impl_hack = create_object_from_id(implementation_id)
      if impl_hack.update_object!(:project_project_id)[:project_project_id]
        proj_impl_id = implementation_id
      else
        proj_impl = Model.get_obj(impl_hack.model_handle,{:cols => [:id],:filter => [:eq, :ancestor_id,impl_hack[:id]]})
        proj_impl_id = proj_impl[:id]
      end

      impl = create_object_from_id(proj_impl_id)
      opts = {:include_file_assets => true}
      impl_tree = impl.get_module_tree(opts)

      # report the id the caller passed in, not the resolved one
      impl_tree.first[:id] = implementation_id.to_i #TODO: part of hack

      {:data => impl_tree}
    end
  end
end
-
-
1
require 'fileutils'
-
-
1
module XYZ
  # Module-import wizard backend (upload, extract, parse, save DSL info).
  class ImportController < AuthController

    def index
      return {:data=>''}
    end

    # Registers all client-side templates for the import wizard and launches
    # it via R8.Import.loadWizard().
    def load_wizard()
      tpl = R8Tpl::TemplateR8.new("ide/panel",user_context())
      tpl.set_js_tpl_name("workspace_panel")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("import/step_one",user_context())
      tpl.set_js_tpl_name("import_step_one")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("import/step_two",user_context())
      tpl.set_js_tpl_name("import_step_two")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("import/step_three",user_context())
      tpl.set_js_tpl_name("import_step_three")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("import/step_four",user_context())
      tpl.set_js_tpl_name("import_step_four")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("import/step_five",user_context())
      tpl.set_js_tpl_name("import_step_five")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("import/display_attribute",user_context())
      tpl.set_js_tpl_name("import_display_attribute")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("import/edit_attribute",user_context())
      tpl.set_js_tpl_name("import_edit_attribute")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      # run_javascript("R8.User.init();")
      # run_javascript("R8.Import.init(#{import_json},2);")
      run_javascript("R8.Import.loadWizard();")

      return {:content=>''}
    end

    # Step one of the import wizard: accepts an uploaded module tar.gz,
    # extracts it into a repo directory, parses it, and returns a refinement
    # hash for the UI.
    # NOTE(review): the unconditional raise below (marking the removal of
    # Implementation.create_library_repo_and_implementation) makes everything
    # after it unreachable — this action is effectively disabled pending fix-up.
    def step_one()
      module_upload = request.params["module_package"]
      pkg_filename = module_upload[:filename]
      tmp_file_handle = module_upload[:tempfile]

      # TODO: module_name; this is hack to get it from tar.gz file name
      module_name = pkg_filename.gsub(/\.tar\.gz$/,"")
      prefixes_to_strip = %w{puppetlabs ghoneycutt} #TODO: complate hack
      prefixes_to_strip.each{|pre|module_name.gsub!(Regexp.new("^#{pre}-"),"")}
      # drop a trailing "-x.y.z" version suffix
      module_name.gsub!(/-[0-9]+\.[0-9]+\.[0-9]+$/,"")

      # mv the tmp file to under CompressedFileStore
      tmp_path = tmp_file_handle.path
      tmp_file_handle.close
      compressed_file = "#{R8::EnvironmentConfig::CompressedFileStore}/#{pkg_filename}"
      FileUtils.mv tmp_path, compressed_file

      config_agent_type = :puppet
      library_idh = Library.get_users_private_library(model_handle(:library)).id_handle()
      raise Error.new("TODO: fix up: Implementation.create_library_repo_and_implementation has been removed")
      # ---- unreachable from here down (see raise above) ----
      repo_obj,impl_obj = Implementation.create_library_repo_and_implementation(library_idh,module_name,config_agent_type, :delete_if_exists => true)

      repo_name = repo_obj[:repo_name]
      module_dir = repo_obj[:local_dir]
      base_dir = repo_obj[:base_dir]

      # EXTRACT AND PARSE CODE-----------------------------
      user_obj = CurrentSession.new.get_user_object()
      username = user_obj[:username]
      repo_name = "#{username}-#{config_agent_type}-#{module_name}"

      opts = {:strip_prefix_count => 1}
      base_dir = R8::Config[:repo][:base_directory]

      # begin capture here so can rerun even after loading in dir already
      begin
        # extract tar.gz file into directory
        Extract.single_module_into_directory(compressed_file,repo_name,base_dir,opts)
      rescue Exception => e
        # raise e
      end

      # NOTE(review): this extract block is a verbatim duplicate of the one
      # just above — confirm whether the repetition is intentional
      opts = {:strip_prefix_count => 1}
      # begin capture here so can rerun even after loading in dir already
      begin
        # extract tar.gz file into directory
        Extract.single_module_into_directory(compressed_file,repo_name,base_dir,opts)
      rescue Exception => e
        # raise e
      end

      impl_obj.create_file_assets_from_dir_els()

      # parsing
      begin
        raise Error.new("ConfigAgent.parse_given_module_directory(config_agent_type,module_dir) needs to be converted to form ConfigAgent.parse_given_module_directory(config_agent_type,impl_obj")
        r8_parse = ConfigAgent.parse_given_module_directory(config_agent_type,module_dir)
      rescue ErrorUsage::Parsing => error
        return {
          :data=> {:errors=>error} #TODO: must be changed
        }
      # TODO: deprecated this rescue R8ParseError => e
      rescue => e
        pp [:r8_parse_error, e.to_s]
        return {
          :data=> {:errors=>{:type=>"parse",:error=>e.to_s}}
        }
      end

      meta_generator = GenerateDSL.create()
      refinement_hash = meta_generator.generate_refinement_hash(r8_parse,module_name,impl_obj.id_handle)
      return {
        :data=> refinement_hash
      }

      # pp refinement_hash

      # ---- dead code below the return above ----
      # in between here refinement has would have through user interaction the user set the needed unknowns
      # mock_user_updates_hash!(refinement_hash)
      r8meta_hash = refinement_hash.render_hash_form()
      # TODO: currently version not handled
      r8meta_hash.delete("version")
      r8meta_path = "#{module_dir}/r8meta.#{config_agent_type}.yml"
      r8meta_hash.write_yaml(STDOUT)
      File.open(r8meta_path,"w"){|f|r8meta_hash.write_yaml(f)}

      pp r8meta_hash
      return {
        :data=> r8meta_hash
      }
=begin
      Not reached
      return {
        :data=> {
          :import_id=>pkg_root
        }
      }
=end
    end

    # Step two of the wizard: loads a canned test-import definition by id and
    # boots the client wizard at step 2; empty data when the file is missing.
    def step_two(id)
      files = {
        "1" => 'hadoop.rb',
        "2" => 'gearman.rb',
      }
      file_path = R8::Config[:puppet_test_import_path]+'/'+files[id]

      if File.exists?(file_path)
        # NOTE(review): eval of file content — acceptable only for these
        # local, developer-controlled test fixtures
        import_content = eval(IO.read(file_path))
        import_json = JSON.generate(import_content)

        tpl = R8Tpl::TemplateR8.new("ide/panel",user_context())
        tpl.set_js_tpl_name("workspace_panel")
        tpl_info = tpl.render()
        include_js_tpl(tpl_info[:src])

        tpl = R8Tpl::TemplateR8.new("import/step_two",user_context())
        tpl.set_js_tpl_name("import_step_two")
        tpl_info = tpl.render()
        include_js_tpl(tpl_info[:src])

        tpl = R8Tpl::TemplateR8.new("import/step_three",user_context())
        tpl.set_js_tpl_name("import_step_three")
        tpl_info = tpl.render()
        include_js_tpl(tpl_info[:src])

        tpl = R8Tpl::TemplateR8.new("import/step_four",user_context())
        tpl.set_js_tpl_name("import_step_four")
        tpl_info = tpl.render()
        include_js_tpl(tpl_info[:src])

        tpl = R8Tpl::TemplateR8.new("import/display_attribute",user_context())
        tpl.set_js_tpl_name("import_display_attribute")
        tpl_info = tpl.render()
        include_js_tpl(tpl_info[:src])

        tpl = R8Tpl::TemplateR8.new("import/edit_attribute",user_context())
        tpl.set_js_tpl_name("import_edit_attribute")
        tpl_info = tpl.render()
        include_js_tpl(tpl_info[:src])

        run_javascript("R8.User.init();")
        run_javascript("R8.Import.init(#{import_json},2);")

        return {:content=>''}
      end

      return {:data => ''}
    end

    # Final wizard step: persists the user-confirmed DSL meta info.
    def finish()
      meta_info_hash = JSON.parse(request.params["import_def"])
      pp meta_info_hash
      ModuleDSL::GenerateFromImpl.save_dsl_info(meta_info_hash,model_handle(:implementation))
      {:data => ''}
    end
  end
end
-
# TODO: clean up
-
-
# Base controller
-
1
module DTK
-
1
class Controller < ::Ramaze::Controller
-
-
1
ENCRYPTION_SALT = '397dedcf120682329a34a00a9bc768dfdf34062c'
-
1
helper :common
-
1
helper :version_helper
-
1
helper :general_processing
-
1
helper :process_search_object
-
1
helper :user
-
1
helper :rest
-
1
helper :bundle_and_return_helper
-
1
trait :user_model => XYZ::User
-
-
1
include R8Tpl::CommonMixin
-
1
include R8Tpl::Utility::I18n
-
-
21
provide(:html, :type => 'text/html'){|a,s|s} #lamba{|a,s|s} is fn called after bundle and render for a html request
-
1
provide(:json, :type => 'application/json'){|a,s|s}
-
-
1
layout :bundle_and_return
-
-
# TODO: work around to handle falser8 calls
-
1
# Workaround handler for legacy "falser8" calls: ignores all arguments and
# always responds with an empty content payload.
def falser8(*_args)
  {:content => nil}
end
-
-
### bundle and return top fns (TODO: see if way to put in helper
-
1
# Ramaze layout hook: assembles the final response body from the per-action
# results accumulated in @ctrl_results.
#   - REST requests short-circuit to rest_response().
#   - HTML requests: merge each action's panel content (:append/:replace/
#     :prepend), collect js/css includes and js exec snippets, then render
#     the "<layout>.layout" template with the merged variables.
#   - JSON (non-REST) requests: attach client config and pretty-print
#     @ctrl_results.
def bundle_and_return
  return rest_response() if rest_request?()

  include_js('cache/model_defs.cache')
  include_js('cache/model.i18n.cache')
  # TODO: get things cleaned up after implemented :json/js responses
  unless json_response?()
    # TODO: rather than using @layout; may calculate it dynamically here
    layout_name = "#{@layout || :default}.layout"

    js_includes = Array.new
    css_includes = Array.new
    js_exe_list = Array.new

    # panel name -> accumulated content string; order follows :as_run_list
    panels_content = Hash.new
    @ctrl_results[:as_run_list].each do |action_namespace|
      (@ctrl_results[action_namespace][:content]||[]).each do |content_item|
        assign_type = content_item[:assign_type]
        panel = content_item[:panel]
        content = content_item[:content]

        case assign_type
        when :append
          # first writer seeds the panel; later writers append in place
          (panels_content[panel].nil?) ?
            panels_content[panel] = content :
            panels_content[panel] << content
        when :replace
          panels_content[panel] = content
        when :prepend
          if(panels_content[panel].nil?)
            panels_content[panel] = content
          else
            tmp_contents = panels_content[panel]
            panels_content[panel] = content + tmp_contents
          end
        end
      end

      if !@ctrl_results[action_namespace][:js_includes].nil?
        @ctrl_results[action_namespace][:js_includes].each { |js_include| js_includes << js_include }
      end
      if !@ctrl_results[action_namespace][:css_includes].nil?
        @ctrl_results[action_namespace][:css_includes].each { |css_include| css_includes << css_include }
      end

      if !@ctrl_results[action_namespace][:js_exe_list].nil?
        @ctrl_results[action_namespace][:js_exe_list].each { |js_exe| js_exe_list << js_exe }
      end

      # TODO: process js_exe_scripts
    end

    # TODO: temp hack, need to figure out how to get js cache files included better way
    @js_includes.each { |js_include| js_includes << js_include }

    # app-level variables exposed to the layout template
    _app = {
      :js_includes => js_includes,
      :css_includes => css_includes,
      :js_exe_list => js_exe_list,
      :base_uri => R8::Config[:base_uri],
      :base_css_uri => R8::Config[:base_css_uri],
      :base_js_uri => R8::Config[:base_js_uri],
      :base_images_uri => R8::Config[:base_images_uri],
    }
    template_vars = {
      :_app => _app,
      :main_menu => String.new,
      :left_col => String.new
    }

    panels_content.each { |key,value|
      template_vars[key] = value
    }

    # TODO: what is :layout for in the class sig?
    tpl = R8Tpl::TemplateR8.new(layout_name,user_context(),:layout)
    template_vars.each{|k,v|tpl.assign(k.to_sym,v)}
    tpl_return = tpl.render()

    return tpl_return
  else
    # TODO: more fully implement config passing between server/client
    @ctrl_results[:config] = {
      :base_uri => "#{R8::Config[:base_uri]}/xyz",
      :date_format => 'MM/DD/YY',
      :time_format => '12:00',
      :etc => 'etc'
    }
    return JSON.pretty_generate(@ctrl_results)
  end
end
-
-
# error handling
-
1
# Runs the given block, converting ErrorUsage exceptions into a structured
# error payload ({:data => {"error" => ...}}) instead of letting them
# propagate; any other result of the block is returned unchanged.
def handle_errors(&block)
  block.call
rescue ErrorUsage => e
  { :data => {
      "error" => {
        "error_code" => 1,
        "error_msg" => e.to_s
      }
    }
  }
end
-
-
#####################################################
-
### MAIN ACTION DEFS
-
#####################################################
-
-
1
# REST endpoint: runs the search object posted with the request and responds
# with the matching model objects.
def rest__list()
  # The search object is mandatory for this endpoint.
  search_object = ret_search_object_in_request()
  raise(Error.new("no search object in request")) if search_object.nil?
  # TODO: may put search save and retrieve into rest interface
  rest_ok_response(Model.get_objects_from_search_object(search_object))
end
-
-
1
# Renders the HTML list view for this controller's model.
# Flow: resolve/persist the search object from the request; for node /
# component / attribute also render the saved-search panel; run the search;
# decorate node select/multiselect fields with i18n display values; return
# the rendered template targeted at the requested panel.
def list()
  # TODO: dont think needed: user
  search_object = ret_search_object_in_request()
  raise Error.new("no search object in request") unless search_object

  if search_object.needs_to_be_retrieved?
    search_object.retrieve_from_saved_object!()
  elsif search_object.should_save?
    search_object.save(model_handle(:search_object))
  end

  # only create if need to and appropriate to do so
  search_object.save_list_view_in_cache?(user_context())

  paging_info = search_object.paging
  order_by_list = search_object.order_by

  _model_var = {:i18n => get_model_i18n(model_name().to_s,user_context())}

  # TODO: figure out clean way to seperate search out
  # TODO hack that for testing, which now only implemented by node
  # NOTE(review): search_context/search_id are only assigned inside this
  # branch; for other models they remain nil when used further below.
  search_content = nil
  if model_name() == :node or model_name() == :component or model_name() == :attribute
    tpl = R8Tpl::TemplateR8.new("#{model_name()}/search",user_context())

    # where clause {:relation => model_name().to_s} makes sure that only search queries of relevant type returned
    _saved_search_list = get_objects(:search_object,{:relation => model_name().to_s})

    # TODO: temp until more fully implementing select fields to be called in one off manner,right now
    # select field for saved search dropdown is coded into view render search function
    (_saved_search_list||[]).each_with_index do |so,index|
      _saved_search_list[index][:selected] = (search_object && search_object[:id] == so[:id]) ? 'selected="1"' : ''
    end

    # new (unsaved) searches get the synthetic id 'new'
    if(!search_object[:id])
      search_context = model_name().to_s+'-list'
      search_id = 'new';
      search_object[:id] = search_id
      add_js_exe("R8.Search.newSearchContext('#{search_context}');")
      add_js_exe("R8.Search.addSearchObj('#{search_context}',#{search_object.json});")
    else
      search_context = model_name().to_s+'-list'
      search_id = search_object[:id]
      add_js_exe("R8.Search.newSearchContext('#{search_context}');")
      add_js_exe("R8.Search.addSearchObj('#{search_context}',#{search_object.json});")
    end

    tpl.assign("_saved_search_list",_saved_search_list)
    tpl.assign("num_saved_searches",_saved_search_list.length)
    # register the remaining saved searches with the client-side search context
    (_saved_search_list||[]).each do |so|
      if(search_id != so[:id])
        add_js_exe("R8.Search.addSearchObj('#{search_context}',#{so.json});")
      end
    end

    # run_javascript("R8.Search.initSearchContext('#{search_context}','#{search_id}');")
    tpl.assign(:search_id,search_id)

    tpl.assign("_#{model_name().to_s}",_model_var)
    # tpl.assign("#{search_context}-current_start",(paging_info||{})[:start]||0)
    tpl.assign(:current_start,(paging_info||{})[:start]||0)
    tpl.assign(:_app,app_common())
    tpl.assign(:search_id,search_id)
    tpl.assign(:search_context,search_context)

    field_set = Model::FieldSet.default(model_name)
    model = ret_model_for_list_search(field_set)
    tpl.assign(:model_name,model_name().to_s)
    tpl.assign("#{model_name().to_s}",model)

    search_content = tpl.render()
  end
  # end search testing hack

  template_name = search_object.saved_search_template_name() || "#{model_name()}/#{default_action_name()}"
  tpl = R8Tpl::TemplateR8.new(template_name,user_context())
  field_set = search_object.field_set

  tpl.assign(:search_content, search_content)
  set_template_order_columns!(tpl,order_by_list,field_set)
  set_template_paging_info!(tpl,paging_info)

  # TODO: parent id is right now passed in opts, may change
  # opts.merge!(:parent_id => parent_id) if parent_id
  # model_list = get_objects(model_name(),where_clause,opts)

  # TODO: temp hack to have default view only include component instances
  component_default = nil
  if search_object[:search_pattern].is_default_view? and model_name() == :component
    search_object[:search_pattern][:filter] = [:neq,:node_node_id,nil]
    component_default = true
  end

  model_list = Model.get_objects_from_search_object(search_object)

  # TODO: temp hack to have default view only include component instances
  if component_default
    model_list.reject!{|r|not id_handle(r[:node_node_id],:node)[:parent_model_name]==:datacenter}
  end

  # for node lists, translate select/multiselect raw values into display text
  if model_name() == :node
    model_defs = get_model_defs(model_name())
    model_list.each_with_index do |model_obj,index|
      model_obj.each do |field,value|
        if model_defs[:field_defs][field] && (model_defs[:field_defs][field][:type] == 'select' || model_defs[:field_defs][field][:type] == 'multiselect')
          display_key = field.to_s+'_display'
          model_list[index][display_key.to_sym] = _model_var[:i18n][:options_list][field][value]
        end
      end
    end
  end

  tpl.assign("#{model_name().to_s}_list",model_list)
  tpl.assign("_#{model_name().to_s}",_model_var)
  tpl.assign(:search_context,search_context)
  tpl.assign(:_app,app_common())

  panel_id = request.params['panel_id'] || 'main_body'

  # NOTE(review): this local shadows the model_name() helper and is never
  # read afterwards — looks like leftover code.
  model_name = model_name().to_s

  return { :content => tpl.render(), :panel => panel_id }
end
-
-
1
# Near-duplicate of #list kept for experimentation: same search/render flow
# but with the client-side add_js_exe registrations commented out and debug
# pp output left in. The =begin block documents a sample request payload.
def list2()
  panel_id = request.params['panel_id'] || 'main_body'
  # NOTE(review): leftover debug output to stdout
  pp '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
  pp panel_id
=begin
{"paging"=>"",
"search_name"=>"template search",
"search"=> "{
\"relation\":null,
\"id\":\"\",
\"search_pattern\":{
\":columns\":[\":containing_datacenter\",\":parent_name\",\":display_name\"],
\":filter\":[\":and\",[\":eq\",\":type\",\"template\"]],
\":order_by\":[],
\":paging\":{},
\":relation\":\"component\"
},
\"display_name\":\"template search\"}",
"order_by"=>""}
=end

  search_object = ret_search_object_in_request()
  raise Error.new("no search object in request") unless search_object

  if search_object.needs_to_be_retrieved?
    search_object.retrieve_from_saved_object!()
  elsif search_object.should_save?
    # NOTE(review): #list calls save(model_handle(:search_object)) here;
    # confirm whether the argument-less save is intentional.
    search_object.save
  end

  # only create if need to and appropriate to do so
  search_object.save_list_view_in_cache?(user_context())

  paging_info = search_object.paging
  order_by_list = search_object.order_by

  _model_var = {:i18n => get_model_i18n(model_name().to_s,user_context())}

  # TODO: figure out clean way to seperate search out
  # TODO hack that for testing, which now only implemented by node
  search_content = nil
  if model_name() == :node or model_name() == :component or model_name() == :attribute
    # if model_name() == :node
    tpl = R8Tpl::TemplateR8.new("#{model_name()}/search",user_context())

    # where clause {:relation => model_name().to_s} makes sure that only search queries of relevant type returned
    _saved_search_list = get_objects(:search_object,{:relation => model_name().to_s})

    # TODO: temp until more fully implementing select fields to be called in one off manner,right now
    # select field for saved search dropdown is coded into view render search function
    (_saved_search_list||[]).each_with_index do |so,index|
      _saved_search_list[index][:selected] = (search_object && search_object[:id] == so[:id]) ? 'selected="1"' : ''
    end

    if(!search_object[:id])
      search_context = model_name().to_s+'-list'
      search_id = 'new';
      search_object[:id] = search_id
      # add_js_exe("R8.Search.newSearchContext('#{search_context}');")
      # add_js_exe("R8.Search.addSearchObj('#{search_context}',#{search_object.json});")
    else
      search_context = model_name().to_s+'-list'
      search_id = search_object[:id]
      # add_js_exe("R8.Search.newSearchContext('#{search_context}');")
      # add_js_exe("R8.Search.addSearchObj('#{search_context}',#{search_object.json});")
    end

    tpl.assign("_saved_search_list",_saved_search_list)
    tpl.assign("num_saved_searches",_saved_search_list.length)
    (_saved_search_list||[]).each do |so|
      if(search_id != so[:id])
        # add_js_exe("R8.Search.addSearchObj('#{search_context}',#{so.json});")
      end
    end

    # add_js_exe("R8.Search.initSearchContext('#{search_context}','#{search_id}');")
    tpl.assign(:search_id,search_id)

    tpl.assign("_#{model_name().to_s}",_model_var)
    # tpl.assign("#{search_context}-current_start",(paging_info||{})[:start]||0)
    tpl.assign(:current_start,(paging_info||{})[:start]||0)
    tpl.assign(:_app,app_common())
    tpl.assign(:search_id,search_id)
    tpl.assign(:search_context,search_context)

    field_set = Model::FieldSet.default(model_name)
    model = ret_model_for_list_search(field_set)
    tpl.assign(:model_name,model_name().to_s)
    tpl.assign("#{model_name().to_s}",model)
    search_content = tpl.render()
  end

  template_name = search_object.saved_search_template_name() || "#{model_name()}/#{default_action_name()}"
  tpl = R8Tpl::TemplateR8.new(template_name,user_context())
  field_set = search_object.field_set

  tpl.assign(:search_content, search_content)
  set_template_order_columns!(tpl,order_by_list,field_set)
  set_template_paging_info!(tpl,paging_info)

  model_list = Model.get_objects_from_search_object(search_object)
  # for node lists, translate select/multiselect raw values into display text
  if model_name() == :node
    model_defs = get_model_defs(model_name())
    model_list.each_with_index do |model_obj,index|
      model_obj.each do |field,value|
        if model_defs[:field_defs][field] && (model_defs[:field_defs][field][:type] == 'select' || model_defs[:field_defs][field][:type] == 'multiselect')
          display_key = field.to_s+'_display'
          model_list[index][display_key.to_sym] = _model_var[:i18n][:options_list][field][value]
        end
      end
    end
  end

  tpl.assign("#{model_name().to_s}_list",model_list)
  tpl.assign("_#{model_name().to_s}",_model_var)
  tpl.assign(:search_context,search_context)
  tpl.assign(:_app,app_common())

  return {
    :content => tpl.render(),
    :panel => panel_id
  }
end
-
-
# TODO: id and parsed query string shouldnt be passed, id should be available from route string
-
# TODO: need to figure out best way to handle parsed_query_string
-
1
# Renders the standard "display" view for the object identified by +id+.
# For nodes, select/multiselect fields additionally get a "<field>_display"
# entry holding the i18n display text for the stored value.
# parsed_query_string is accepted for route compatibility (see TODO above).
def display(id,parsed_query_string=nil)
  record = get_object_by_id(id)

  template = R8Tpl::TemplateR8.new("#{model_name()}/#{default_action_name()}",user_context())
  template.assign(model_name(),record)

  model_ctx = {}
  model_ctx[:i18n] = get_model_i18n(model_name().to_s,user_context())
  template.assign("_#{model_name().to_s}",model_ctx)

  if model_name() == :node
    field_defs = get_model_defs(model_name())[:field_defs]
    record.each do |field,value|
      field_def = field_defs[field]
      next unless field_def && ['select','multiselect'].include?(field_def[:type])
      display_key = (field.to_s+'_display').to_sym
      record[display_key] = model_ctx[:i18n][:options_list][field][value]
    end
  end

  {:content => template.render()}
end
-
-
# TODO: need to figure out best way to handle parsed_query_string
-
1
# Renders the "edit" form view for the object identified by +id+.
# For node select/multiselect fields, builds a "<field>_options_list" hash of
# "<option>_selected" markers so the template can pre-select the stored value.
# parsed_query_string is accepted for route compatibility (see TODO above).
def edit(id,parsed_query_string=nil)
  model_result = get_object_by_id(id)

  tpl = R8Tpl::TemplateR8.new("#{model_name()}/#{default_action_name()}",user_context())
  tpl.assign(model_name(),model_result)

  _model_var = {}
  _model_var[:i18n] = get_model_i18n(model_name().to_s,user_context())
  tpl.assign("_#{model_name().to_s}",_model_var)

  if model_name() == :node
    model_defs = get_model_defs(model_name())
    model_result.each do |field,value|
      if model_defs[:field_defs][field]
        case model_defs[:field_defs][field][:type]
        when 'select','multiselect'
          options_list = _model_var[:i18n][:options_list][field]
          ol_key = (field.to_s+'_options_list').to_sym
          model_result[ol_key] = Hash.new
          # FIX: block params renamed from |value,label| which shadowed the
          # outer |field,value| pair; behavior is unchanged.
          options_list.each do |option_value,_label|
            value_key = option_value+'_selected'
            model_result[ol_key][value_key.to_sym] = (model_result[field] == option_value) ? ' selected="true"' : ''
          end
        when 'date'
          # no special handling yet
        end
      end
    end
  end
  return {:content => tpl.render()}
end
-
### end of main action defs
-
-
# update or create depending on whether id is in post content
-
1
# Updates (when "id" is present) or creates (otherwise) a model object from
# +explicit_hash+ or, by default, the request params.
# Special params consumed before the column filter: id, parent_id,
# parent_model_name, model, name, redirect, return_model.
# Response: rest response for REST requests; {:data => object} when
# return_model; the id when opts[:return_id]; otherwise redirects to the
# object's display page (unless redirect=false was posted).
def save(explicit_hash=nil,opts={})
  hash = explicit_hash || request.params.dup
  ### special fields
  id = hash.delete("id")
  id = nil if id.kind_of?(String) and id.empty?
  parent_id = hash.delete("parent_id")
  parent_model_name = hash.delete("parent_model_name")
  model_name = hash.delete("model") || model_name()
  name = hash.delete("name") || hash["display_name"]
  # NOTE(review): this local shadows the redirect() method; the call at the
  # bottom still resolves to the method because it takes an argument.
  redirect = (not (hash.delete("redirect").to_s == "false"))

  # TODO: revisit during cleanup, return_model used for creating links
  rm_val = hash.delete("return_model")
  return_model = rm_val && rm_val == "true"

  # TODO: fix up encapsulate translating from raw_hash to one for model
  cols = Model::FieldSet.all_settable(model_name)
  # delete all elements in hash that are not actual or virtual settable columns or ones that are null or have empty string value
  # NOTE(review): deleting from `hash` while iterating it is unspecified
  # behavior in Ruby — safer to iterate over hash.keys.
  hash.each do |k,v|
    keep = (cols.include_col?(k.to_sym) and hash[k] and not (hash[k].respond_to?(:empty?) and hash[k].empty?))
    unless keep
      Log.info("in save function removing illegal column #{k} from model #{model_name}")
      hash.delete(k)
    end
  end

  if id
    # update
    update_from_hash(id.to_i,hash)
  else
    # create
    # TODO: cleanup confusion over hash and string leys
    hash.merge!({:display_name => name}) unless (hash.has_key?(:display_name) or hash.has_key?("display_name"))
    parent_id_handle = nil
    create_hash = nil
    if parent_id
      parent_id_handle = id_handle(parent_id,parent_model_name)
      create_hash = {model_name.to_sym => {name => hash}}
    else
      parent_id_handle = top_level_factory_id_handle()
      create_hash = {name.to_sym => hash}
    end
    new_id = create_from_hash(parent_id_handle,create_hash)
    id = new_id if new_id
  end

  if rest_request?
    return rest_ok_response(:id => id)
  end

  if return_model
    return {:data=> get_object_by_id(id)}
  end

  return id if opts[:return_id]
  redirect "/xyz/#{model_name()}/display/#{id.to_s}" if redirect
end
-
-
1
# Clones the object identified by +id+ into the target given by the
# target_id/target_model_name request params, then redirects — either to the
# route described by the model_redirect/action_redirect/id_redirect params
# (forwarding any extra query params) or to the new object's display page.
# Wrapped in handle_errors so ErrorUsage becomes a structured error payload.
def clone(id)
  handle_errors do
    # NOTE(review): this local shadows the id_handle() helper; later calls
    # with arguments still resolve to the method.
    id_handle = id_handle(id)
    hash = request.params
    target_id_handle = nil
    if hash["target_id"] and hash["target_model_name"]
      input_target_id_handle = id_handle(hash["target_id"].to_i,hash["target_model_name"].to_sym)
      target_id_handle = Model.find_real_target_id_handle(id_handle,input_target_id_handle)
    else
      Log.info("not implemented yet")
      return redirect "/xyz/#{model_name()}/display/#{id.to_s}"
    end

    # TODO: need to copy in avatar when hash["ui"] is non null
    override_attrs = hash["ui"] ? {:ui=>hash["ui"]} : {}
    target_object = target_id_handle.create_object()
    clone_opts = id_handle.create_object().source_clone_info_opts()
    new_obj = target_object.clone_into(id_handle.create_object(),override_attrs,clone_opts)
    # id of the freshly cloned object (nil if the clone failed)
    id = new_obj && new_obj.id()

    # TODO: clean this up,hack to update UI params for newly cloned object

    if hash["model_redirect"]
      base_redirect = "/xyz/#{hash["model_redirect"]}/#{hash["action_redirect"]}"
      # a literal '*'-prefixed id_redirect means "use the new object's id"
      redirect_id = hash["id_redirect"].match(/^\*/) ? id.to_s : hash["id_redirect"]
      redirect_route = "#{base_redirect}/#{redirect_id}"
      request_params = ''
      expected_params = ['model_redirect','action_redirect','id_redirect','target_id','target_model_name']
      request.params.each do |name,value|
        if !expected_params.include?(name)
          request_params << '&' if request_params != ''
          request_params << "#{name}=#{value}"
        end
      end
      ajax_request? ? redirect_route += '.json' : nil
      # NOTE(review): URI.encode was deprecated in Ruby 2.7 and removed in
      # 3.0 — replace with URI::DEFAULT_PARSER.escape or CGI escaping.
      redirect_route << URI.encode("?#{request_params}") if request_params != ''
    else
      redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"
      ajax_request? ? redirect_route += '.json' : nil
    end

    redirect redirect_route
  end
end
-
-
1
private
-
1
# Lazily builds and memoizes the UserContext wrapping this controller.
def user_context()
  return @user_context if @user_context
  @user_context = UserContext.new(self)
end
-
-
######################
-
#####Helper fns
-
-
# TODO: should be pushed down to model or something
-
1
# TODO: should be pushed down to model or something
# Prepares +model+'s fields for display dispatching on each field's declared
# type. Currently every field is forced to the placeholder type 'etc', so the
# 'select'/'date'/'checkbox' branches are effectively dead (see TODO below).
def setup_fields_for_display(model)
  model_def = get_model_defs(model_name().to_s)
  model.each do |field,value|
    # NOTE(review): `field.sym` is not a stdlib method — presumably a project
    # extension, or a typo for `field.to_sym`; confirm.
    field_sym = field.sym
    # TODO: temp until fully implemented
    model_def[field_sym] = {}
    model_def[field_sym][:type] = 'etc'
    case model_def[field_sym][:type]
    when "select"
      setup_options_field(model,field_sym,model_def[field_sym])
    when "date"
    when "checkbox"
    when "etc"
    end
  end
end
-
-
# TODO: model meant to be handled and edited in reference style
-
1
# TODO: model meant to be handled and edited in reference style
# Decorates +model+ for a select-style +field+: sets "<field>_display" to the
# option label for the current value and a "<option>_selected" marker per
# option (' selected' for the chosen option, '' otherwise).
def setup_options_field(model,field,model_def)
  field_value = model[field]
  display_col = (field.to_s+'_display').to_sym
  # NOTE(review): `model()` (with parens) calls the method, not the parameter;
  # possibly model_name() was intended — confirm.
  model_options = get_model_options(model().to_s)
  model[display_col] = model_options[field][field_value]

  model_options[field].each do |_key,option_value|
    # FIX: the original wrote to the undefined name `object` here, which would
    # raise NameError at runtime; `model` is the object being decorated.
    model[(option_value+'_selected').to_sym] = (field_value == option_value) ? ' selected' : ''
  end
end
-
-
1
# Returns the Host header of the current request (nil if absent).
def http_host()
  request.env.fetch("HTTP_HOST", nil)
end
-
-
# TBD: using temporaily before writing my owb error handling; from Toth
-
# will make this an errior helper
-
1
# Renders a canned "405 Method Not Allowed" page via error_layout, naming the
# rejected HTTP method from the Rack env.
def error_405
  error_layout 405, '405 Method Not Allowed', %[
<p>
The #{request.env['REQUEST_METHOD']} method is not allowed for the
requested URL.
</p>
]
end
-
-
1
# Sends a minimal HTML error page with the given HTTP +status+; +title+ is
# HTML-escaped (h), +content+ is embedded as-is. respond! short-circuits the
# action. The string is unindent-ed, so its leading whitespace is cosmetic.
def error_layout(status, title, content = '')
  respond! %[
<html>
<head>
<title>#{h(title)}</title>
</head>
<body>
<h1>#{h(title)}</h1>
#{content}
</body>
</html>
].unindent, status
end
-
-
# TODO: this shouldnt be a controller method, should be in some util class or something
-
# html rendering helpers
-
1
# TODO: this shouldnt be a controller method, should be in some util class or something
# Builds an HTML anchor for +component+, using component[:link][:href] (or "")
# as target and component[:display_name] (or "") as link text. +ref+ is
# accepted for interface compatibility but unused.
# NOTE(review): values are interpolated without HTML-escaping — safe only for
# trusted model data.
def html_render_component_href(ref,component)
  link = component[:link]
  href = (link && link[:href]) || ""
  display = component[:display_name] || ""
  "<a href=\"#{href}\">#{display}</a>"
end
-
end
-
-
# END of Controller
-
1
# Base class for controllers requiring an authenticated session; enforces the
# check before every action.
class AuthController < Controller

  before_all do
    # Suspending this check for this time
    check_user_authentication
  end

  # Validates the current session: skipped entirely when session timeout is
  # disabled in config; raises DTK::SessionError when no user object is bound
  # to the session, and DTK::SessionTimeout when the configured inactivity
  # window has elapsed. On success refreshes the :last_ts activity stamp.
  def check_user_authentication
    current_session = CurrentSession.new

    if R8::Config[:session][:timeout][:disabled]
      Log.debug "User session timeout has been disabled!"
      return
    end

    if current_session.get_user_object().nil?
      # Log.info "Missing authentication credentials, please log in again and re-try your request"
      # FIX: removed an unreachable `return` that followed this raise.
      raise DTK::SessionError, "Missing authentication credentials, please log in again and re-try your request"
    end

    # seed the activity timestamp on first authenticated request
    session.store(:last_ts, Time.now.to_i) if session.fetch(:last_ts).to_i == 0

    if (Time.now.to_i - session.fetch(:last_ts).to_i) > (R8::Config[:session][:timeout][:hours].to_i).hours
      # session expired
      # Log.info "Session has expired due to inactivity, please log in again"
      raise DTK::SessionTimeout, "Session has expired due to inactivity, please log in again"
    else
      session.store(:last_ts, Time.now.to_i)
      # current_session.set_access_time(Time.now)
    end
  end
end
-
-
-
end
-
-
# system fns for controller
-
1
require __DIR__('action_set')
-
-
# TODO: Should all controllers/models be loaded, or load just base, and rest dynamically
-
-
# Here go your requires for subclasses of Controller:
-
# require __DIR__('admin')
-
# require __DIR__('data_source')
-
1
%w{ide project import account implementation library target inventory workspace state_change datacenter node_group node node_interface component attribute attribute_link port_link monitoring_item search_object viewspace user task assembly editor file_asset repo messages component_module service_module test_module node_module metadata developer namespace integration}.each do |controller_file|
-
36
require __DIR__(controller_file)
-
end
-
-
1
require 'ap'
-
-
1
module XYZ
-
1
# Stub controller for tenant provisioning driven by an external repo manager.
class IntegrationController < Controller

  # Accepts username/password/email, (eventually) spins up the tenant, then
  # notifies the repo manager, which sends the welcome email. The tenant
  # provisioning itself is not implemented yet — this is scaffolding.
  def rest__spin_tenant
    username, password, email = ret_non_null_request_params(:username, :password, :email)

    # Rich: You have other params in request in case you need them


    # NOTE(review): `ap password` prints the plaintext password to the server
    # console/log — remove before this leaves development.
    ap " Sync Started"
    ap username
    ap password
    ap email

    # Spin up tenants goes here

    # notify back repoman that tenant is ready and repoman will send email
    client = RepoManagerClient.new
    ap client.notify_tenant_ready(email, username)

    rest_ok_response
  end

end
-
end
-
1
module XYZ
-
1
# Inventory views over managed/staged nodes.
class InventoryController < AuthController

  # Bootstraps the inventory view: fetches staged nodes (currently only
  # dumped via pp for debugging) and initializes the client-side view.
  def index
    # TODO: what is proper where clause to generaly get managed nodes
    node_list = get_objects(:node,{:type=>"staged"})
    # NOTE(review): tpl is built but never rendered or returned — dead code?
    tpl = R8Tpl::TemplateR8.new("inventory/node_list",user_context())

    pp node_list

    # NOTE(review): `id` here has no visible local/param — presumably a
    # controller helper; confirm it resolves at runtime.
    run_javascript("R8.InventoryView.init('#{id}');")

    return {:content => ""}
  end

  # Renders the seed-content template fragment.
  def seed_content_tpl
    # TODO: what is proper where clause to generaly get managed nodes
    tpl = R8Tpl::TemplateR8.new("inventory/seed_content_tpl",user_context())

    return {:content => tpl.render()}
  end

end
-
end
-
1
module DTK
-
1
# Actions over component/module libraries.
class LibraryController < AuthController
  # REST endpoint: returns the requested facet ("about" param) of the library
  # identified by :library_id.
  def rest__info_about()
    library = create_obj(:library_id)
    about = ret_non_null_request_params(:about).to_sym
    rest_ok_response library.info_about(about)
  end

  # TODO: see which of below should be deprecated
  # Imports an implementation package into the first library found (stubbed
  # library lookup — see inline TODO).
  def import_implementation(implementation_name)
    library_idh = Model.get_objs(model_handle,{:cols => [:id]}).first.id_handle() #TODO: stub
    ImportImplementationPackage.add(library_idh,implementation_name)
    {:content => {}}
  end

  # Bootstraps the client-side library view: registers the ui/panel js
  # template and kicks off R8.LibraryView.
  def index
    tpl = R8Tpl::TemplateR8.new("ui/panel",user_context())
    tpl.set_js_tpl_name("ui_panel")
    tpl_info = tpl.render()
    include_js_tpl(tpl_info[:src])

    run_javascript("R8.LibraryView.init('#{model_name}');")
    return {:content => ''}
  end
end
-
end
-
1
module DTK
-
1
# Exposes the server-side message queue to clients.
class MessagesController < AuthController
  # REST endpoint: returns pending messages from the global message queue.
  def rest__retrieve()
    # NOTE(review): `retrive` (sic) — presumably the method really is spelled
    # this way on MessageQueue; confirm before "fixing" it here.
    messages = ::DTK::MessageQueue.retrive()
    rest_ok_response messages
  end
end
-
end
-
1
module XYZ
-
1
# Serves static table-metadata JSON files to clients.
class MetadataController < Controller
  # REST endpoint: returns the raw JSON metadata for +file_name+ from the
  # meta/tables_metadata directory next to this source tree.
  # NOTE(review): file_name is interpolated into the path unchecked — a value
  # containing "../" could escape the metadata directory; consider validating.
  def rest__get_metadata(file_name)
    path = File.expand_path("../meta/tables_metadata/#{file_name}.json", File.dirname(__FILE__))
    # FIX: File.read closes the handle; the original File.open(...).read
    # leaked an open file descriptor per request.
    file = File.read(path)
    rest_ok_response file
  end
end
-
end
-
1
module XYZ
-
1
# Monitoring views plus the cron-driven idle-assembly reaper.
class Monitoring_itemController < AuthController

  # limit (hours) how long can nodes run
  UP_TIME_LIMIT = R8::Config[:idle][:up_time_hours]

  # Both display actions delegate to the shared search/render helper below.
  def list_for_component_display()
    component_or_node_display()
  end
  def node_display()
    component_or_node_display()
  end

  ##
  # Method will get all 'succeeded' assemblies and check their nodes for
  # 'up time'. If one of the nodes has been running more than 'UP_TIME_LIMIT'
  # all nodes of that assembly will be stopped.
  # NOTE(review): the code actually lists ALL assembly instances — the
  # 'succeeded' filter mentioned here is not visible in the call; confirm.
  #
  def rest__check_idle()
    prefix_log = "[CRON JOB]"

    Log.info "#{prefix_log} Monitoring idle assemblies: START"


    assemblies = Assembly::Instance.list(model_handle(:assembly),{})

    str_identifer = (assemblies.map { |a| a[:display_name]}).join(', ')

    Log.info "#{prefix_log} Monitoring assemblies: #{str_identifer}"
    aws_connection = CloudConnect::EC2.new

    # check statuses
    assemblies.each do |assembly|

      nodes = Assembly::Instance.get_nodes([assembly.id_handle], :type)
      # flag to indicate if assembly nodes need to be stopped
      stop_this_assembly = false

      nodes.each do |node|
        # skip if node is staged
        next if 'staged'.eql?(node[:type])

        # status of the nodes
        response = aws_connection.get_instance_status(node.instance_id())

        # one over-limit running node condemns the whole assembly
        if response[:status].eql? :running
          if (response[:up_time_hours] >= UP_TIME_LIMIT.to_i)
            stop_this_assembly = true
            break
          end
        end
      end

      # if one of the nodees is running to long we stop all nodes
      if stop_this_assembly
        str_identifer = (nodes.map { |n| n.name }).join(', ')
        Log.info "#{prefix_log} Stopping assembly '#{assembly[:display_name]}', with nodes: '#{str_identifer}'"
        CommandAndControl.stop_instances(nodes)
      end
    end

    Log.info "#{prefix_log} Monitoring idle assemblies: END"


    rest_ok_response({ :status => :ok })
  end

  private
  # helper fn
  # Runs the search object posted with the request and renders the model's
  # list template with i18n context.
  def component_or_node_display()
    search_object = ret_search_object_in_request()
    raise Error.new("no search object in request") unless search_object

    model_list = Model.get_objects_from_search_object(search_object)

    # TODO: should we be using default action name
    action_name = :list
    tpl = R8Tpl::TemplateR8.new("#{model_name()}/#{action_name}",user_context())
    _model_var = {}
    _model_var[:i18n] = get_model_i18n(model_name().to_s,user_context())

    set_template_defaults_for_list!(tpl)
    tpl.assign("_#{model_name().to_s}",_model_var)
    tpl.assign("#{model_name()}_list",model_list)

    return {:content => tpl.render()}
  end
end
-
end
-
1
module DTK
-
1
# Exposes namespace-level queries.
class NamespaceController < AuthController
  # REST endpoint: responds with the system default namespace name.
  def rest__default_namespace_name()
    default_name = Namespace.default_namespace_name
    rest_ok_response(default_name)
  end
end
-
end
-
1
module XYZ
-
1
class NodeController < AuthController
-
1
helper :node_helper
-
1
helper :rest_async
-
-
### mcollective actions
-
1
# Kicks off an asynchronous netstats collection on the node; the caller polls
# rest__get_action_results with the returned queue id.
def rest__initiate_get_netstats()
  target_node = create_node_obj(:node_id)
  results_queue = ActionResultsQueue.new
  # TODO: Move GetNetstas MColl action class to shared location between assembly and node controllers
  Assembly::Instance::Action::GetNetstats.initiate([target_node], results_queue)
  rest_ok_response(:action_results_id => results_queue.id)
end
-
-
1
# Kicks off an asynchronous process-list (ps) collection on the node; results
# are fetched later via rest__get_action_results.
def rest__initiate_get_ps()
  target_node = create_node_obj(:node_id)
  results_queue = ActionResultsQueue.new
  Assembly::Instance::Action::GetPs.initiate([target_node], results_queue, :node)
  rest_ok_response(:action_results_id => results_queue.id)
end
-
-
1
# Kicks off an asynchronous test execution on the node; results are fetched
# later via rest__get_action_results.
def rest__initiate_execute_tests()
  target_node = create_node_obj(:node_id)
  results_queue = ActionResultsQueue.new
  Assembly::Instance::Action::ExecuteTestsV2.initiate([target_node], results_queue, :node)
  rest_ok_response(:action_results_id => results_queue.id)
end
-
-
1
# Fetches the queued results of a previously initiated asynchronous action,
# from either the simple queue (using_simple_queue=true) or the full
# ActionResultsQueue (honoring return_only_if_complete and
# disable_post_processing flags).
def rest__get_action_results()
  # TODO: to be safe need to garbage collect on ActionResultsQueue in case miss anything
  action_results_id = ret_non_null_request_params(:action_results_id)
  ret_only_if_complete = ret_request_param_boolean(:return_only_if_complete)
  disable_post_processing = ret_request_param_boolean(:disable_post_processing)
  response = nil
  if ret_request_param_boolean(:using_simple_queue)
    # FIX: this was assigned to the misspelled local `respone`, so the
    # simple-queue branch silently returned nil to the client.
    response = rest_ok_response SimpleActionQueue.get_results(action_results_id)
  else
    response = rest_ok_response ActionResultsQueue.get_results(action_results_id,ret_only_if_complete,disable_post_processing)
  end

  return response
end
-
-
#### create and delete actions ###
-
1
# Adds a component (looked up from :component_template_name) to the node and
# responds with the new component's id.
def rest__add_component()
  target_node = create_node_obj(:node_id)
  component_template, component_title = ret_component_template_and_title(:component_template_name)
  added_idh = target_node.add_component(component_template,:component_title => component_title)
  rest_ok_response(:component_id => added_idh.get_id())
end
-
-
1
# Deletes the component identified by :component_id from the node.
def rest__delete_component()
  target_node = create_node_obj(:node_id)
  # not checking here if component_id points to valid object; check is in delete_component
  cmp_id = ret_non_null_request_params(:component_id)
  target_node.delete_component(id_handle(cmp_id,:component))
  rest_ok_response
end
-
-
1
# Tears down the node's backing instance and deletes the node record.
def rest__destroy_and_delete()
  create_node_obj(:node_id).destroy_and_delete()
  rest_ok_response
end
-
-
1
# Starts a stopped node: validates it is eligible (not staged, currently
# stopped/pending), then asynchronously boots the instance and creates a
# power-on task in a deferred thread carrying the caller's session. Responds
# immediately with a SimpleActionQueue id the client can poll for :task_id.
def rest__start()
  node = create_node_obj(:node_id)
  nodes = get_objects(:node, { :id => node[:id]})
  node_idh = ret_request_param_id_handle(:node_id)

  nodes, is_valid, error_msg = node_valid_for_aws?(nodes, :stopped)

  unless is_valid
    return rest_ok_response(:errors => [error_msg])
  end

  queue = SimpleActionQueue.new

  user_object = ::DTK::CurrentSession.new.user_object()
  CreateThread.defer_with_session(user_object, Ramaze::Current::session) do
    # invoking command to start the nodes
    CommandAndControl.start_instances(nodes)

    task = Task.power_on_from_node(node_idh)
    task.save!()

    # publish the task id so the polling client can track progress
    queue.set_result(:task_id => task.id)
  end

  rest_ok_response :action_results_id => queue.id
end
-
-
1
# Stops a running node after validating it is eligible (not staged, currently
# running/pending); responds with an error list when validation fails.
def rest__stop()
  node = create_node_obj(:node_id)
  nodes = get_objects(:node, { :id => node[:id]})

  nodes, is_valid, error_msg = node_valid_for_aws?(nodes, :running)

  return rest_ok_response(:errors => [error_msg]) unless is_valid

  Node.stop_instances(nodes)
  rest_ok_response(:status => :ok)
end
-
-
1
# Validates that the first node in +nodes+ may be power-cycled: it must not
# be staged and its admin_op_status must match /<status_pattern>|pending/.
# Returns [nodes, valid?, error_msg] with error_msg nil on success.
def node_valid_for_aws?(nodes, status_pattern)
  first_node = nodes.first

  # staged nodes have no backing instance to start/stop
  if first_node[:type] == Node::Type::Node.staged
    return nodes, false, "Node with id '#{first_node[:id]}' is 'staged' and as such cannot be started/stopped."
  end

  # translates to /running|pending/ or /stopped|pending/ checks
  status_match = first_node[:admin_op_status] =~ Regexp.new("#{status_pattern.to_s}|pending")
  if status_match.nil?
    return nodes, false, "There are no #{status_pattern} nodes with id '#{nodes.first[:id]}'"
  end

  return nodes, true, nil
end
-
-
#### end: create and delete actions ###
-
-
#### list and info actions ###
-
1
# REST endpoint: lists nodes, optionally scoped to the target named by the
# :target_indentifier (sic — runtime param key, kept as-is) request param.
def rest__list()
  target_name, is_list_all = ret_request_params(:target_indentifier, :is_list_all)

  target_id = nil
  if target_name && !target_name.empty?
    target_id = DTK::Datacenter.name_to_id(model_handle(:datacenter), target_name)
  end
  response = ret_nodes_by_subtype_class(model_handle(), { :target_id => target_id, :is_list_all => is_list_all })
  rest_ok_response response
end
-
-
1
# Returns printable info for a node instance, YAML-encoded.
# Only the :instance subtype is supported.
def rest__info()
  node, subtype = ret_node_params_object_and_subtype()
  raise ErrorUsage::BadParamValue.new(:subtype,subtype) unless subtype == :instance
  rest_ok_response node.info(:print_form=>true), :encode_into => :yaml
end
-
-
1
# Returns one aspect of a node; legal aspects per subtype are in AboutEnum.
def rest__info_about()
  node, subtype = ret_node_params_object_and_subtype()
  about = ret_non_null_request_params(:about).to_sym
  unless AboutEnum[subtype].include?(about)
    raise ErrorUsage::BadParamValue.new(:about,AboutEnum[subtype])
  end
  rest_ok_response node.info_about(about)
end
-
1
# Maps node subtype to the `about` aspects rest__info_about accepts.
AboutEnum = {
  :instance => [:components,:attributes]
  # :template => [:nodes,:components,:targets]
}
-
-
1
# Returns the node's attributes in printable form, optionally filtered.
def rest__get_attributes()
  node = create_node_obj(:node_id)
  filter = ret_request_params(:filter)
  opts = filter ? {:filter => filter.to_sym} : {}
  rest_ok_response node.get_attributes_print_form(opts)
end
-
-
# the body has an array each element of form
-
# {:pattern => PAT, :value => VAL}
-
# pat can be one of three forms
-
# 1 - an id
-
# 2 - a name of form ASSEM-LEVEL-ATTR or NODE/COMPONENT/CMP-ATTR, or
-
# 3 - a pattern (TODO: give syntax) that can pick out multiple vars
-
# this returns same output as info about attributes, pruned for just new ones set
-
1
# Sets node attribute values from posted {:pattern => PAT, :value => VAL} pairs
# (see the comment block above for the accepted pattern forms).
def rest__set_attributes()
  create_node_obj(:node_id).set_attributes(ret_params_av_pairs())
  rest_ok_response
end
-
-
#### end: list and info actions ###
-
-
#### creates tasks to execute/converge assemblies and monitor status
-
1
# Stages a new node instance from a node template into the (default) target.
def rest__stage()
  target = create_target_instance_with_default(:target_id)
  node_binding_rs = node_binding_ruleset?(:node_template_identifier)
  raise ErrorUsage.new("Missing node template identifier") unless node_binding_rs

  opts = {}
  if node_name = ret_request_params(:name)
    opts[:override_attrs] = {:display_name => node_name}
  end
  node_instance_idh = node_binding_rs.clone_or_match(target,opts)
  rest_ok_response :node_id => node_instance_idh.get_id()
end
-
-
1
# Lists violations on the node as {:type, :description} rows sorted by type.
def rest__find_violations()
  node = create_node_obj(:node_id)
  violation_table = node.find_violations().map do |violation|
    {:type => violation.type(), :description => violation.description()}
  end.sort_by{|row| row[:type].to_s}
  rest_ok_response violation_table
end
-
-
1
# Creates and persists a converge task for the node; errors when there is
# nothing to converge.
def rest__create_task()
  node_idh = ret_request_param_id_handle(:node_id)
  commit_msg = ret_request_params(:commit_msg)
  task = Task.create_from_node(node_idh,commit_msg)
  raise ErrorUsage.new("No changes to converge") unless task
  task.save!()
  rest_ok_response :task_id => task.id
end
-
-
1
# Reports node task status; :format defaults to :hash.
def rest__task_status()
  node_idh = ret_request_param_id_handle(:node_id)
  format = (ret_request_params(:format) || :hash).to_sym
  rest_ok_response Task::Status::Node.get_status(node_idh, :format => format)
end
-
#### end: creates tasks to execute/converge assemblies and monitor status
-
-
1
# Swaps old_image_id for new_image_id across node templates.
def rest__image_upgrade()
  old_image_id, new_image_id = ret_non_null_request_params(:old_image_id,:new_image_id)
  Node::Template.image_upgrade(model_handle(),old_image_id,new_image_id)
  rest_ok_response
end
-
-
1
# Creates or updates a node template in the (default) target.
def rest__add_node_template()
  target = create_target_instance_with_default(:target_id)
  node_template_name, image_id = ret_non_null_request_params(:node_template_name,:image_id)
  extra_opts = ret_params_hash(:operating_system,:size_array)
  Node::Template.create_or_update_node_template(target,node_template_name,image_id,extra_opts)
  rest_ok_response
end
-
-
1
# Deletes the node template identified by :node_template_name.
def rest__delete_node_template()
  node_binding_ruleset = create_obj(:node_template_name, NodeBindingRuleset)
  Node::Template.delete_node_template(node_binding_ruleset)
  rest_ok_response
end
-
-
1
# Deferred response: refreshes the node's operational status and returns it.
def rest__get_op_status()
  node = create_node_obj(:node_id)
  rest_deferred_response do |handle|
    handle.rest_ok_response(:op_status => node.get_and_update_status!())
  end
end
-
-
##### TODO: below needs cleanup
-
-
1
# Adds a node to a node group; the group must already belong to a target.
def rest__add_to_group()
  node_id, node_group_id = ret_non_null_request_params(:node_id, :node_group_id)
  node_group = create_object_from_id(node_group_id,:node_group)
  parent_id = node_group.update_object!(:datacenter_datacenter_id)[:datacenter_datacenter_id]
  unless parent_id
    raise Error.new("node group with id (#{node_group_id.to_s}) given is not in a target")
  end
  member = create_object_from_id(node_id)
  node_group.add_member(member,id_handle(parent_id,:target))
  rest_ok_response
end
-
-
-
1
helper :i18n_string_mapping
-
-
1
# Fetches one node with its common columns.
def get(id)
  {:data => create_object_from_id(id).get_obj_with_common_cols()}
end
-
-
# TODO: this should be a post; so transitioning over
-
1
# Destroys the node's instance and deletes its model object.
def destroy_and_delete(id=nil)
  id ||= request.params["id"]
  create_object_from_id(id).destroy_and_delete()
  {:data => {:id=>id, :result=>true}}
end
-
######
-
-
1
# Scratch action: renders the node/actest template and returns its markup.
def actest
  tpl = R8Tpl::TemplateR8.new("node/actest",user_context())
  tpl.assign(:_app,app_common())
  {:content => tpl.render()}
end
-
1
# Scratch action: renders the node/overlaytest template and returns its markup.
def overlaytest
  tpl = R8Tpl::TemplateR8.new("node/overlaytest",user_context())
  tpl.assign(:_app,app_common())
  {:content => tpl.render()}
end
-
-
1
# Renders the dock panel listing node- and component-level service checks.
# Fix: removed leftover `pp` debug statements that dumped the check lists to
# server stdout on every request.
def dock_get_service_checks(id)
  node = create_object_from_id(id)
  node_service_checks = node.get_node_service_checks()
  component_service_checks = node.get_component_service_checks()
  tpl = R8Tpl::TemplateR8.new("dock/node_get_service_checks",user_context())
  tpl.assign(:_app,app_common())
  tpl.assign(:node_service_checks,node_service_checks)
  tpl.assign(:component_service_checks,component_service_checks)

  panel_id = request.params['panel_id']

  return {
    :content => tpl.render(),
    :panel => panel_id
  }
end
-
-
1
# Renders the dock panel listing the users present on the node.
def dock_get_users(id)
  node = create_object_from_id(id)
  tpl = R8Tpl::TemplateR8.new("dock/node_get_users",user_context())
  tpl.assign(:_app,app_common())
  tpl.assign(:user_list,node.get_users())

  {
    :content => tpl.render(),
    :panel => request.params['panel_id']
  }
end
-
-
1
# Renders the dock panel listing the applications installed on the node.
def dock_get_applications(id)
  node = create_object_from_id(id)
  tpl = R8Tpl::TemplateR8.new("dock/node_get_apps",user_context())
  tpl.assign(:_app,app_common())
  tpl.assign(:app_list,node.get_applications())

  {
    :content => tpl.render(),
    :panel => request.params['panel_id']
  }
end
-
-
1
# Returns the node's external and internal-external component ports.
def get_ports(id)
  node = create_object_from_id(id)
  {:data => node.get_ports("component_external","component_internal_external")}
end
-
-
1
# Stub autocomplete endpoint returning a canned list of names.
def ac_remotesearch
  # leftover debug output; kept to preserve existing stdout behavior
  pp '++++++++++++++++++++++++++++++'
  pp request.params
  results_array = [
    'Michael Jordan', 'Scotty Pippen', 'Magic Johnson', 'Larry Bird',
    'David Robinson', 'LeBron James', 'Al Harrington', 'Baron Davis',
    'Charles Barkely', 'Chuck Johnson', 'Cal Hooper', 'Dominique Wilkins'
  ]
  pp results_array

  {:data => results_array}
end
-
-
1
# Renders the workspace view for one node and appends it to the viewspace panel.
def wspace_display(id)
  c = ret_session_context_id()
  tpl = R8Tpl::TemplateR8.new("node/wspace_display",user_context())
  tpl.set_js_tpl_name('node_wspace_display')

  node = Node.get_wspace_display(IDHandle[:c => c, :guid => id])

  # TODO: temp hack to stub things out
  node[:operational_status] = 'good'
  node[:model_name] = 'node'

  tpl.assign(:node,node)
  tpl.assign(:base_images_uri,R8::Config[:base_images_uri])

  num_components = (node[:component]||[]).map{|cmp|cmp[:id]}.uniq.size
  tpl.assign(:num_components,num_components)

  node_vars = {:i18n => get_model_i18n("node",user_context())}
  tpl.assign("_node",node_vars)

  tpl_result = tpl.render()
  tpl_result[:panel] = 'viewspace'
  tpl_result[:assign_type] = 'append'
  tpl_result
end
-
-
1
# Renders the node workspace template client-side and pushes the node into the
# workspace via generated javascript.
def wspace_display_2(id)
  # TODO: decide if toolbar is needed/used at node level
  # need to augment for nodes that are in datacenter directly and not node groups
  tpl = R8Tpl::TemplateR8.new("node/wspace_display",user_context())
  tpl.set_js_tpl_name("node_wspace_display")
  tpl_info = tpl.render()
  include_js_tpl(tpl_info[:src])

  field_set = Model::FieldSet.default(:node)
  node = get_object_by_id(id,:node)
  items = [{
    :type => 'node',
    :object => node,
    :toolbar_def => {},
    :tpl_callback => tpl_info[:template_callback],
    :ui => node[:ui]
  }]

  addItemsObj = JSON.generate(items)
  run_javascript("R8.Workspace.addItems(#{addItemsObj});")

  return {}
end
-
-
1
# IDE variant of wspace_display_2: renders the node workspace IDE template and
# returns the item list as data instead of pushing it via javascript.
def wspace_display_ide(id)
  # TODO: decide if toolbar is needed/used at node level
  # need to augment for nodes that are in datacenter directly and not node groups
  tpl = R8Tpl::TemplateR8.new("node/wspace_display_ide",user_context())
  tpl.set_js_tpl_name("node_wspace_display_ide")
  tpl_info = tpl.render()
  include_js_tpl(tpl_info[:src])

  field_set = Model::FieldSet.default(:node)
  node = get_object_by_id(id,:node)
  items = [{
    :type => 'node',
    :object => node,
    :toolbar_def => {},
    :tpl_callback => tpl_info[:template_callback],
    :ui => node[:ui]
  }]

  return {:data=>items}
end
-
-
1
# Pops a workspace alert confirming a component was added to the node.
def added_component_conf(id)
  node = get_object_by_id(id)
  alert_str = "Added Component to Node(#{node[:display_name]})"
  run_javascript("R8.Workspace.showAlert('#{alert_str}');")
  {}
end
-
-
1
# IDE variant: returns the confirmation message as data instead of a JS alert.
def added_component_conf_ide(id)
  node = get_object_by_id(id)
  {:data => "Added Component to Node(#{node[:display_name]})"}
end
-
-
1
# Re-renders one node's workspace panel ('item-<id>') in place.
def wspace_refresh(id)
  c = ret_session_context_id()
  tpl = R8Tpl::TemplateR8.new("node/wspace_refresh",user_context())
  tpl.set_js_tpl_name('node_wspace_refresh')

  node = Node.get_wspace_display(IDHandle[:c => c, :guid => id])

  # TODO: temp hack to stub things out
  node[:operational_status] = 'good'
  node[:model_name] = 'node'

  tpl.assign(:node,node)
  tpl.assign(:base_images_uri,R8::Config[:base_images_uri])

  num_components = (node[:component]||[]).map{|cmp|cmp[:id]}.uniq.size
  tpl.assign(:num_components,num_components)

  node_vars = {:i18n => get_model_i18n("node",user_context())}
  tpl.assign("_node",node_vars)

  tpl_result = tpl.render()
  tpl_result[:panel] = 'item-'+node[:id].to_s
  # leftover debug print; kept to preserve existing stdout behavior
  p 'Panel IS:'+tpl_result[:panel]

  tpl_result
end
-
-
1
# Builds the dock list of components on a node: decorates each component with
# an i18n label and an onclick handler, then pushes the rendered panel via JS.
def get_components(id)
  model_name = :component
  field_set = Model::FieldSet.default(model_name)
  component_list = get_objects(model_name,{:node_node_id=>id})

  # Expect something like:
  #   node = Node.new(node_id)
  #   component_list = node.get_components()
  # get_components should probably take a param to return a sub list of
  # type(s) of components, ie: get_components(['language'])

  component_i18n = get_model_i18n('component',user_context())

  component_list.each do |component|
    # leftover debug output; kept to preserve existing stdout behavior
    pp '--------------------'
    pp 'component:'+component[:display_name]
    pp 'id:'+component[:id].to_s
    component_name = component[:display_name].gsub('::','_')

    ds_ref = (component[:ds_attributes]||{})[:ref]
    component[:label] = component_i18n[ds_ref.to_sym] if ds_ref
    component[:label] ||= component_i18n[component_name.to_sym] || "component"

    component[:onclick] = "R8.Workspace.Dock.loadDockPanel('component/wspace_dock_get_attributes/#{component[:id].to_s}');"
  end

  return {} if component_list.empty?
  component_list.first[:css_class] = 'first'
  component_list.last[:css_class] = 'last'

  tpl = R8Tpl::TemplateR8.new("workspace/dock_list",user_context())
  js_tpl_name = 'wspace_dock_list'
  tpl.set_js_tpl_name(js_tpl_name)
  tpl_info = tpl.render()
  include_js_tpl(tpl_info[:src])

  panel_id = request.params['panel_id']

  panel_cfg_hash = {
    :title => {:i18n => component_i18n[:components]},
    :item_list => component_list,
  }
  panel_cfg = JSON.generate(panel_cfg_hash)
  # TODO: temp pass of '0' as panel index
  run_javascript("R8.Workspace.Dock.pushDockPanel2('0',#{panel_cfg},'#{js_tpl_name}');")

  return {}
end
-
-
1
# Renders a node's external ports into the workspace via generated javascript.
# Hash/array attribute values are JSON-encoded; scalars pass through.
def wspace_render_ports(id=nil)
  filter = [:and,[:eq,:is_port,true],[:eq,:port_is_external,true]]
  cols = [:id,:display_name,:value_derived,:value_asserted]
  field_set = Model::FieldSet.new(:attribute,cols)
  ds = SearchObject.create_from_field_set(field_set,ret_session_context_id(),filter).create_dataset()
  ds = ds.where(:param_node_id => id.to_i) if id
  port_list = ds.all
  port_list.each do |port|
    val = port[:attribute_value]
    port[:value] = (val.kind_of?(Hash) or val.kind_of?(Array)) ? JSON.generate(val) : val
  end

  ports = JSON.generate(port_list)
  run_javascript("R8.Workspace.renderItemPorts('#{id}',#{ports});")

  return {}
end
-
-
1
# Searches node binding rulesets, prefix-matching request params against the
# searchable columns, and decorates each row with an icon path and i18n name.
# Fix: the display-name fallback read `node[ref]` with a bare, undefined local
# `ref` (NameError at runtime whenever :display_name was nil); the fallback
# value lives under the :ref key.
def search
  # TODO: harmonize with rest__list
  search_cols = [:display_name]

  filter_conjuncts = request.params.map do |name,value|
    [:regex,name.to_sym,"^#{value}"] if search_cols.include?(name.to_sym)
  end.compact
  cols = NodeBindingRuleset.common_columns()
  sp_hash = {
    :cols => cols + [:ref]
  }

  unless filter_conjuncts.empty?
    sp_hash[:filter] = [:and] + filter_conjuncts
  end
  node_list = Model.get_objs(model_handle(:node_binding_ruleset),sp_hash,:keep_ref_cols => true).each{|r|r.materialize!(cols)}
  icon_dir = "#{R8::Config[:base_images_uri]}/v1/nodeIcons"
  node_list.each do |node|
    png = (node[:os_type] ? "#{node[:os_type]}.png" : "unknown-node.png")
    node[:image_path] = "#{icon_dir}/#{png}"
    node[:display_name] ||= node[:ref] # was node[ref] — undefined local
    node[:i18n] = node[:display_name]
  end
  {:data=>node_list}
end
-
end
-
end
-
-
# TODO: NODE-GROUP: these are target node groups; may rename
-
1
module DTK
-
1
class Node_groupController < AuthController
-
1
helper :node_group_helper
-
-
1
# Creates a node group in the given (or default) target.
def rest__create()
  display_name = ret_non_null_request_params(:display_name)
  target_id, spans_target = ret_request_params(:target_id,:spans_target)
  target_idh = target_idh_with_default(target_id)
  opts = {}
  opts[:spans_target] = true if spans_target
  new_ng_idh = NodeGroup.create_instance(target_idh,display_name,opts)
  rest_ok_response(:node_group_id => new_ng_idh.get_id())
end
-
-
1
# Deletes the node group identified by :node_group_id.
def rest__delete()
  create_obj(:node_group_id).delete()
  rest_ok_response
end
-
-
1
# Lists all node groups visible through this model handle.
def rest__list()
  rest_ok_response NodeGroup.list(model_handle())
end
-
-
1
# Adds a component (from a template) to every member of the node group.
def rest__add_component()
  node_group = create_obj(:node_group_id)
  component_template, component_title = ret_component_template_and_title(:component_template_id)
  new_idh = node_group.add_component(component_template,:component_title => component_title)
  rest_ok_response(:component_id => new_idh.get_id())
end
-
-
1
# Removes a component from the node group.
def rest__delete_component()
  node_group = create_obj(:node_group_id)
  # not checking here if component_id points to a valid object; the check is
  # inside delete_component
  component_id = ret_non_null_request_params(:component_id)
  node_group.delete_component(id_handle(component_id,:component))
  rest_ok_response
end
-
-
1
# Returns one aspect of the node group (no enum validation, unlike the node
# controller's variant).
def rest__info_about()
  node_group = create_obj(:node_group_id)
  rest_ok_response node_group.info_about(ret_non_null_request_params(:about).to_sym)
end
-
-
1
# Returns the node group's attributes in printable form, optionally filtered.
def rest__get_attributes()
  node_group = create_obj(:node_group_id)
  filter = ret_request_params(:filter)
  filter = filter.to_sym if filter
  rest_ok_response node_group.get_attributes_print_form(Opts.new(:filter => filter))
end
-
-
# the body has an array each element of form
-
# {:pattern => PAT, :value => VAL}
-
# pat can be one of three forms
-
# 1 - an id
-
# 2 - a name of form ASSEM-LEVEL-ATTR or NODE/COMPONENT/CMP-ATTR, or
-
# 3 - a pattern (TODO: give syntax) that can pick out multiple vars
-
# this returns same output as info about attributes, pruned for just new ones set
-
1
# Sets node-group attribute values from posted {:pattern, :value} pairs
# (see the comment block above for the accepted pattern forms).
def rest__set_attributes()
  create_obj(:node_group_id).set_attributes(ret_params_av_pairs())
  rest_ok_response
end
-
-
1
# Lists the member nodes of the node group.
def rest__get_members()
  rest_ok_response create_obj(:node_group_id).get_node_group_members()
end
-
-
1
# Creates and persists a converge task for the node group; errors when the
# group has no member nodes.
def rest__create_task()
  node_group_idh = ret_request_param_id_handle(:node_group_id,NodeGroup)
  commit_msg = ret_request_params(:commit_msg)
  unless task = Task.create_from_node_group(node_group_idh,commit_msg)
    node_group = node_group_idh.create_object().update_object!(:display_name)
    raise ErrorUsage.new("No nodes belong to node group (#{node_group[:display_name]})")
  end
  task.save!()
  rest_ok_response :task_id => task.id
end
-
-
1
# Reports node-group task status; :format defaults to :hash.
def rest__task_status()
  node_group_idh = ret_request_param_id_handle(:node_group_id,NodeGroup)
  format = (ret_request_params(:format) || :hash).to_sym
  rest_ok_response Task::Status::NodeGroup.get_status(node_group_idh, :format => format)
end
-
-
-
# TODO: old methods that need to be re-evaluated
-
-
-
1
# Persists a node group. When the parent is a target, resolves the target id
# (falling back to the default target) before delegating to super, then
# refreshes the target's UI state for the newly created group.
def save()
  params = request.params
  if params["parent_model_name"] == "target"
    target_idh = target_idh_with_default(params["parent_id"])
    # NOTE(review): params wins this merge, so a nil "parent_id" in params
    # clobbers the resolved default target id — confirm this is intended.
    modified_params = {"parent_id" => target_idh.get_id()}.merge(params)
    ret = super(modified_params)
    if ret.is_ok?
      target_idh.create_object().update_ui_for_new_item(ret.data[:id])
    end
    ret
  else
    super
  end
end
-
-
-
1
# Marks one member node as the group's canonical (default) template node.
def rest__set_default_template_node()
  node_group_id, template_node_id = ret_non_null_request_params(:node_group_id,:template_node_id)
  create_object_from_id(node_group_id).update(:canonical_template_node_id => template_node_id.to_i)
  rest_ok_response
end
-
-
# TODO: initially implementing simple version that takes no parameters and uses the canonical_member_id
-
1
# Clones the group's default template node and adds the clone as a member.
def rest__clone_and_add_template_node()
  node_group = create_object_from_id(ret_non_null_request_params(:node_group_id))
  template_node = node_group.get_canonical_template_node()
  raise Error.new("Node group does not have a default template node set") unless template_node
  cloned_idh = node_group.clone_and_add_template_node(template_node)
  rest_ok_response(:id => cloned_idh.get_id)
end
-
-
1
# Legacy (non-rest) delete of a node group by request id.
def delete()
  id = request.params["id"]
  create_object_from_id(id).delete()
  {:data => {:id=>id, :result=>true}}
end
-
end
-
end
-
-
1
module XYZ
  # Placeholder controller: no node-interface actions are defined yet.
  class Node_interfaceController < AuthController
  end
end
-
1
module DTK
-
1
class Node_moduleController < AuthController
-
1
helper :module_helper
-
-
1
# Debug endpoint: generates the module's DSL and prints it to server stdout.
def rest__test_generate_dsl()
  dsl_created_info = create_obj(:node_module_id).test_generate_dsl()
  STDOUT << dsl_created_info[:content] << "\n"
  rest_ok_response
end
-
-
#### create and delete actions ###
-
1
# Creates a new node module in the default project and returns its repo info.
def rest__create()
  module_name = ret_non_null_request_params(:module_name)
  project = get_default_project()
  version = nil # TODO: stub
  local = local_params(:node_module,module_name,:version => version)
  module_repo_info = NodeModule.create_module(project,local)[:module_repo_info]

  rest_ok_response module_repo_info
end
-
-
1
# Imports module content from the repo after its initial create commit.
def rest__update_from_initial_create()
  node_module = create_obj(:node_module_id)
  repo_id, commit_sha = ret_non_null_request_params(:repo_id,:commit_sha)
  repo_idh = id_handle(repo_id,:repo)
  version = ret_version()
  opts = {
    :scaffold_if_no_dsl => ret_request_params(:scaffold_if_no_dsl),
    :do_not_raise => true,
    :process_provider_specific_dependencies => false
  }
  rest_ok_response node_module.import_from_file(commit_sha,repo_idh,version,opts)
end
-
-
1
# Re-parses the module model from workspace-clone changes at the given commit.
def rest__update_model_from_clone()
  node_module = create_obj(:node_module_id)
  commit_sha = ret_non_null_request_params(:commit_sha)
  version = ret_version()
  diffs_summary = ret_diffs_summary()
  opts = {}
  opts[:do_not_raise] = true if ret_request_param_boolean(:internal_trigger)
  opts[:force_parse] = true if ret_request_param_boolean(:force_parse)
  rest_ok_response node_module.update_model_from_clone_changes?(commit_sha,diffs_summary,version,opts)
end
-
-
1
# Deletes the node module and returns its (former) module info.
def rest__delete()
  rest_ok_response create_obj(:node_module_id).delete_object()
end
-
-
1
# Deletes one version of the node module.
def rest__delete_version()
  node_module = create_obj(:node_module_id)
  rest_ok_response node_module.delete_version(ret_version())
end
-
-
#### end: create and delete actions ###
-
-
#### list and info actions ###
-
1
# Lists node modules, optionally diffed against the remote and with extra
# per-module detail.
def rest__list()
  diff = ret_request_params(:diff)
  project = get_default_project()
  remote_repo_base = ret_remote_repo_base()

  opts = Opts.new(:project_idh => project.id_handle())
  if detail = ret_request_params(:detail_to_include)
    opts.merge!(:detail_to_include => detail.map{|d|d.to_sym})
  end
  opts.merge!(:remote_repo_base => remote_repo_base, :diff => diff)

  datatype = diff ? :module_diff : :module
  rest_ok_response NodeModule.list(opts), :datatype => datatype
end
-
-
1
# Returns workspace branch info for the module at the requested version.
def rest__get_workspace_branch_info()
  node_module = create_obj(:node_module_id)
  rest_ok_response node_module.get_workspace_branch_info(ret_version())
end
-
-
1
# Returns info for one node module (id optional) within the default project.
def rest__info()
  module_id = ret_request_param_id_optional(:node_module_id, ::DTK::NodeModule)
  opts = Opts.new(:project_idh => get_default_project().id_handle())
  rest_ok_response NodeModule.info(model_handle(), module_id, opts)
end
-
-
1
# Pulls module content from the remote repo service.
def rest__pull_from_remote()
  rest_ok_response pull_from_remote_helper(NodeModule)
end
-
-
1
# Changes permissions on the remote module.
def rest__remote_chmod()
  rest_ok_response(chmod_from_remote_helper())
end
-
-
1
# Changes ownership of the remote module.
def rest__remote_chown()
  chown_from_remote_helper()
  rest_ok_response
end
-
-
1
# Confirms making the remote module public.
def rest__confirm_make_public()
  rest_ok_response confirm_make_public_helper()
end
-
-
1
# Updates collaboration settings on the remote module.
def rest__remote_collaboration()
  collaboration_from_remote_helper()
  rest_ok_response
end
-
-
1
# Lists collaboration settings of the remote module.
def rest__list_remote_collaboration()
  rest_ok_response list_collaboration_from_remote_helper()
end
-
-
1
# Lists local and remote versions of the node module.
def rest__versions()
  node_module = create_obj(:node_module_id)
  client_rsa_pub_key = ret_request_params(:rsa_pub_key)
  opts = Opts.new(:project_idh => get_default_project().id_handle())

  rest_ok_response node_module.local_and_remote_versions(client_rsa_pub_key, opts)
end
-
-
1
# Returns one aspect (see AboutEnum) of the node module, optionally scoped to
# a component template.
def rest__info_about()
  node_module = create_obj(:node_module_id)
  about = ret_non_null_request_params(:about).to_sym
  component_template_id = ret_request_params(:component_template_id)
  raise ErrorUsage::BadParamValue.new(:about,AboutEnum) unless AboutEnum.include?(about)
  rest_ok_response node_module.info_about(about, component_template_id)
end
-
-
1
# Aspects rest__info_about can report on.
AboutEnum = [:components, :attributes, :instances]
-
-
#### end: list and info actions ###
-
-
#### actions to interact with remote repos ###
-
# TODO: rename; this is just called by install; import ops call create route
-
1
# Installs a node module from dtkn (invoked by `install`; import uses create).
def rest__import()
  rest_ok_response install_from_dtkn_helper(:node_module)
end
-
-
# TODO: rename; this is just called by publish
-
1
# Publishes the node module to dtkn (invoked by `publish`).
def rest__export()
  rest_ok_response publish_to_dtkn_helper(create_obj(:node_module_id))
end
-
-
-
# this should be called when the module is linked, but the specific version is not
-
1
# Imports one version of an already-linked module from the remote repo.
def rest__import_version()
  node_module = create_obj(:node_module_id)
  rest_ok_response node_module.import_version(ret_remote_repo(),ret_version())
end
-
-
# TODO: ModuleBranch::Location: harmonize this signature with one for service module
-
1
# Deletes a module on the remote (dtkn) repo service.
# TODO: ModuleBranch::Location: harmonize this signature with the one for
# service module.
def rest__delete_remote()
  remote_module_name = ret_non_null_request_params(:remote_module_name)
  remote_namespace = ret_request_params(:remote_module_namespace)
  force_delete = ret_request_param_boolean(:force_delete)

  remote_params = remote_params_dtkn(:node_module,remote_namespace,remote_module_name)
  client_rsa_pub_key = ret_request_params(:rsa_pub_key)
  project = get_default_project()

  NodeModule.delete_remote(project, remote_params, client_rsa_pub_key, force_delete)
  rest_ok_response
end
-
-
1
# Lists node modules available on the remote repo service.
def rest__list_remote()
  remotes = NodeModule.list_remotes(model_handle, ret_request_params(:rsa_pub_key))
  rest_ok_response remotes, :datatype => :module_remote
end
-
-
# get remote_module_info; throws an access rights usage error if user does not have access
-
1
# Fetches remote module info; raises an access-rights usage error when the
# user lacks access.
def rest__get_remote_module_info()
  rest_ok_response get_remote_module_info_helper(create_obj(:node_module_id))
end
-
-
#### end: actions to interact with remote repo ###
-
-
#### actions to manage workspace
-
-
1
# Creates a new version branch of the node module.
def rest__create_new_version()
  node_module = create_obj(:node_module_id)
  node_module.create_new_version(ret_version())
  rest_ok_response
end
-
-
1
# Upgrades the module's DSL to a new version (JSON format).
def rest__create_new_dsl_version()
  node_module = create_obj(:node_module_id)
  dsl_version = ret_non_null_request_params(:dsl_version).to_i
  module_version = ret_version()
  node_module.create_new_dsl_version(dsl_version,:json,module_version)
  rest_ok_response
end
-
-
#### end: actions to manage workspace and promote changes from workspace to library ###
-
-
1
# Pushes the node module's repo to the given mirror host.
# Fix: now returns the standard rest_ok_response envelope like every other
# endpoint in this controller (previously leaked push_to_mirror's raw return
# value as the response).
def rest__push_to_mirror()
  node_module = create_obj(:node_module_id)
  mirror_host = ret_non_null_request_params(:mirror_host)
  node_module.push_to_mirror(mirror_host)
  rest_ok_response
end
-
-
end
-
end
-
1
module XYZ
-
1
class Port_linkController < AuthController
-
1
# Creates a port link (plus any needed attribute links) between an input and
# output port under the given parent.
# NOTE: the raise on the first line makes the rest of this method unreachable;
# it is kept as a reference for the replacement implementation.
# Fixes while here: :ouput_id key typo -> :output_id, and the local variable
# `redirect` no longer shadows the `redirect` helper method.
def save(explicit_hash=nil,opts={})
  raise Error.new("TODO: this is now deprecated: PortLink.create_port_and_attr_links__clone_if_needed has changed")
  hash = explicit_hash || request.params
  return Error.new("not implemented update of port link") if hash["id"]

  port_link = {
    :input_id => hash["input_id"].to_i,
    :output_id => hash["output_id"].to_i
  }
  parent_id_handle = id_handle(hash["parent_id"],hash["parent_model_name"])
  handle_errors do
    ret = PortLink.create_port_and_attr_links__clone_if_needed(parent_id_handle,[port_link])
    new_id = ret[:new_port_links].first
    if hash["return_model"] == "true"
      return {:data=>
        {
          :link => get_object_by_id(new_id,:port_link),
          :link_changes => ret
        }
      }
    end

    return new_id if opts[:return_id]
    do_redirect = (not (hash["redirect"].to_s == "false"))
    redirect "/xyz/#{model_name()}/display/#{new_id.to_s}" if do_redirect
  end
end
-
end
-
end
-
-
1
module XYZ
-
1
class ProjectController < AuthController
-
1
# Dev/test action: builds the edit form for unset-required attributes in a
# datacenter (defaults to the first datacenter when none given).
def test_group_attrs(datacenter_id=nil)
  redirect_path = "/xyz/project/test_group_attrs/#{(datacenter_id||"").to_s}"
  unless datacenter_id
    datacenter_id = Model.get_objs(model_handle(:datacenter),{:cols => [:id]}).first[:id]
  end
  pending_changes = flat_list_pending_changes_in_datacenter(datacenter_id.to_i)
  commit_task = create_task_from_pending_changes(pending_changes)

  augmented_attr_list = Attribute.augmented_attribute_list_from_task(commit_task)

  grouped_attrs = Attribute.ret_grouped_attributes!(augmented_attr_list,{:types_to_keep => [:unset_required]})

  i18n_mapping = get_i18n_mappings_for_models(:attribute,:component)
  attr_list = grouped_attrs.map do |attr|
    name = attr[:display_name]
    attr_i18n = i18n_string(i18n_mapping,:attribute,name)
    component_i18n = i18n_string(i18n_mapping,:component,attr[:component][:display_name])
    node_i18n = attr[:node][:display_name]
    {
      :id => attr[:unraveled_attribute_id],
      :name => name,
      :value => attr[:attribute_value],
      :i18n => "#{node_i18n}/#{component_i18n}/#{attr_i18n}"
    }
  end

  tpl = R8Tpl::TemplateR8.new("project/attributes_edit",user_context())
  tpl.assign("redirect",redirect_path)
  tpl.assign("field_list",attr_list)
  {:content => tpl.render()}
end
-
-
1
# Applies posted attribute values, then redirects back.
# Empty-string values are treated as nil (attribute unset).
def save_attributes(explicit_hash=nil)
  attr_val_hash = explicit_hash || request.params.dup
  redirect_path = attr_val_hash.delete("redirect")
  # convert empty strings to nils
  attr_val_hash.each{|key,val| attr_val_hash[key] = nil if val.kind_of?(String) and val.empty?}

  # strip the "c__<num>__" prefix added by the edit view
  # TODO: if the c__ prefix is no longer used, remove it from the view and this strip
  attr_val_hash = attr_val_hash.inject({}) do |acc,(key,val)|
    acc.merge(key.gsub(/^c__[0-9]+__/,"") => val)
  end

  attribute_rows = AttributeComplexType.ravel_raw_post_hash(attr_val_hash,:attribute)
  Attribute.update_and_propagate_attributes(model_handle(:attribute),attribute_rows)
  redirect redirect_path
end
-
-
1
# Creates a new project from the posted name and type.
def create(explicit_hash=nil)
  params = request.params
  # leftover debug output; kept to preserve existing stdout behavior
  pp ["project_create",params]
  Project.create_new_project(model_handle,params["name"],params["type"])
  {}
end
-
-
1
# Destroys and deletes all nodes in a project; project_id may be nil for
# testing when there is only one project.
def destroy_and_delete_nodes(project_id=nil)
  project_id ||= Model.get_objs(model_handle,{:cols => [:id]}).first[:id]
  create_object_from_id(project_id).destroy_and_delete_nodes()
  {:content => {}}
end
-
end
-
end
-
1
module XYZ
-
1
class RepoController < Controller
-
1
# Deletes the repo identified by :repo_id.
def rest__delete()
  repo_id = ret_non_null_request_params(:repo_id)
  Repo.delete(id_handle(repo_id))
  rest_ok_response
end
-
-
# TODO: using maybe just temporarily to import when adding files
-
1
# Synchronizes the target repo's file assets for its single matching
# implementation and pushes the contained files.
def rest__synchronize_target_repo()
  # TODO: check that refreshing all appropriate implementations by just using
  # the project_project_id-is-not-null test is correct
  repo_id = ret_non_null_request_params(:repo_id)
  # NOTE(review): `repo` is never used below — confirm whether it can be dropped
  repo = create_object_from_id(repo_id)
  sp_hash = {
    :cols => [:id, :group_id, :display_name, :local_dir],
    :filter => [:and, [:eq, :repo_id, repo_id], [:neq, :project_project_id, nil]]
  }
  impls = Model.get_objs(model_handle(:implementation),sp_hash)
  raise Error.new("Expecting to just find one matching implementation") unless impls.size == 1
  impl = impls.first
  impl.create_file_assets_from_dir_els()
  impl.add_contained_files_and_push_to_repo()
  rest_ok_response
end
-
end
-
end
-
1
module XYZ
-
1
class Search_objectController < AuthController
-
1
# Saves a search object, caches its list view, and (unless redirect=false)
# redirects to its display page.
def save(explicit_hash=nil)
  hash_assignments = explicit_hash || request.params.dup
  # renamed from `redirect` so the local no longer shadows the redirect helper
  do_redirect = (not (hash_assignments.delete("redirect").to_s == "false"))
  id = super(explicit_hash,:return_id => true)
  SearchObject.save_list_view_in_cache(id,hash_assignments,user_context())
  redirect "/xyz/#{model_name()}/display/#{id.to_s}" if do_redirect
end
-
end
-
end
-
1
module DTK
-
1
class Service_moduleController < AuthController
-
1
helper :module_helper
-
1
helper :assembly_helper
-
1
helper :remotes_helper
-
-
# TODO: for debugging; will be removed
# Debug helper: walks the project trees; returns a plain ok response.
def rest__debug_get_project_trees()
  ServiceModule.get_project_trees(model_handle)
  rest_ok_response
end

# Debug helper: forces port computation for one service module.
def rest__debug_get_ports(service_module_id)
  create_object_from_id(service_module_id).get_ports()
  rest_ok_response
end
# end: for debugging; will be removed
-
-
#### actions to interact with remote repos ###
# Lists remote service modules visible to the caller's RSA public key,
# filtered by namespace; tagged with the :module_remote datatype.
def rest__list_remote()
  rsa_pub_key = ret_request_params(:rsa_pub_key)
  module_list = ServiceModule.list_remotes(model_handle, rsa_pub_key)
  rest_ok_response filter_by_namespace(module_list), :datatype => :module_remote
end

# Assembly templates contained in the service module.
def rest__list_assemblies()
  rest_ok_response create_obj(:service_module_id).get_assembly_templates()
end

# Assembly instances created from the service module.
def rest__list_instances()
  rest_ok_response create_obj(:service_module_id).get_assembly_instances()
end

# Component modules (with version details) referenced by the service module.
def rest__list_component_modules()
  service_module = create_obj(:service_module_id)
  rest_ok_response service_module.list_component_modules(Opts.new(:detail_to_include => [:versions]))
end

# TODO: rename; this is just called by install; import ops call create route
def rest__import()
  rest_ok_response install_from_dtkn_helper(:service_module)
end

# TODO: rename; this is just called by publish
def rest__export()
  rest_ok_response publish_to_dtkn_helper(create_obj(:service_module_id))
end

# this should be called when the module is linked, but the specfic version is not
def rest__import_version()
  service_module = create_obj(:service_module_id)
  remote_repo = ret_remote_repo()
  version = ret_version()
  rest_ok_response service_module.import_version(remote_repo, version)
end

# get remote_module_info; throws an access rights usage error if user does not have access
def rest__get_remote_module_info()
  rest_ok_response get_remote_module_info_helper(create_obj(:service_module_id))
end

def rest__pull_from_remote()
  rest_ok_response pull_from_remote_helper(ServiceModule)
end

def rest__remote_chmod()
  rest_ok_response(chmod_from_remote_helper())
end

def rest__remote_chown()
  chown_from_remote_helper()
  rest_ok_response
end

def rest__confirm_make_public()
  rest_ok_response confirm_make_public_helper()
end

def rest__remote_collaboration()
  collaboration_from_remote_helper()
  rest_ok_response
end

def rest__list_remote_collaboration()
  rest_ok_response list_collaboration_from_remote_helper()
end
#### end actions to interact with remote repos ###
-
-
1
# Lists service modules in the default project; supports diff mode,
# namespace filtering, a remote repo base and extra detail columns.
def rest__list()
  diff = ret_request_params(:diff)
  project = get_default_project()
  namespace = ret_request_params(:module_namespace)
  remote_repo_base = ret_remote_repo_base()

  opts = Opts.new(:project_idh => project.id_handle())
  detail = ret_request_params(:detail_to_include)
  opts.merge!(:detail_to_include => detail.map { |d| d.to_sym }) if detail
  opts.merge!(:remote_repo_base => remote_repo_base, :diff => diff, :namespace => namespace)

  datatype = diff ? :module_diff : :module
  # rest_ok_response filter_by_namespace(ServiceModule.list(opts)), :datatype => datatype
  rest_ok_response ServiceModule.list(opts), :datatype => datatype
end

# Local and remote versions of one service module.
def rest__versions()
  service_module = create_obj(:service_module_id)
  client_rsa_pub_key = ret_request_params(:rsa_pub_key)
  opts = Opts.new(:project_idh => get_default_project().id_handle())
  rest_ok_response service_module.local_and_remote_versions(client_rsa_pub_key, opts)
end

# Diffs between the local module and its remote counterpart.
def rest__list_remote_diffs()
  # version selection is not supported yet; always nil
  rest_ok_response create_obj(:service_module_id).list_remote_diffs(nil)
end
-
-
1
# Info for one service module (or, when the id is absent, module-level info).
def rest__info()
  module_id = ret_request_param_id_optional(:service_module_id, ::DTK::ServiceModule)
  opts = Opts.new(:project_idh => get_default_project().id_handle())
  rest_ok_response ServiceModule.info(model_handle(), module_id, opts)
end

# Info about one aspect of the module; :about must be one of AboutEnum.
def rest__info_about()
  service_module = create_obj(:service_module_id)
  about = ret_non_null_request_params(:about).to_sym
  raise ErrorUsage::BadParamValue.new(:about, AboutEnum) unless AboutEnum.include?(about)
  rest_ok_response service_module.info_about(about)
end
AboutEnum = [:"assembly-templates", :components]

# Workspace branch info for an optional specific version.
def rest__get_workspace_branch_info()
  service_module = create_obj(:service_module_id)
  rest_ok_response service_module.get_workspace_branch_info(ret_request_params(:version))
end
-
-
1
# Creates a new service module in the default project and returns its
# branch id plus repo info for the client to clone.
def rest__create()
  module_name = ret_non_null_request_params(:module_name)
  namespace = ret_request_param_module_namespace?()
  config_agent_type = ret_config_agent_type()
  project = get_default_project()

  # Only pass :namespace through when the client supplied one.
  opts_local_params = (namespace ? {:namespace => namespace} : {})
  local_params = local_params(:service_module, module_name, opts_local_params)

  # Cleanup: reuse the config_agent_type computed above instead of calling
  # ret_config_agent_type() a second time; dropped the unused `version` stub local.
  opts_create_mod = Opts.new(
    :config_agent_type => config_agent_type
  )
  init_hash_response = ServiceModule.create_module(project, local_params, opts_create_mod)

  rest_ok_response(:service_module_id => init_hash_response[:module_branch_idh].get_id(), :repo_info => init_hash_response[:module_repo_info])
end
-
-
1
# Creates a new version branch of the service module.
def rest__create_new_version()
  service_module = create_obj(:service_module_id)
  service_module.create_new_version(ret_version())
  rest_ok_response
end

# Deletes the whole service module; responds with the deleted module's info.
def rest__delete()
  rest_ok_response create_obj(:service_module_id).delete_object()
end

# Deletes one version of the service module; responds with the module info.
def rest__delete_version()
  service_module = create_obj(:service_module_id)
  rest_ok_response service_module.delete_version(ret_version())
end
-
-
1
# Deletes a module on the remote repo manager. The remote module may be
# given as a qualified name; an explicit :remote_module_namespace overrides
# the namespace embedded in it.
def rest__delete_remote()
  client_rsa_pub_key = ret_request_params(:rsa_pub_key)
  remote_namespace = ret_request_params(:remote_module_namespace)
  force_delete = ret_request_param_boolean(:force_delete)

  opts = Hash.new
  # Robustness fix: ret_request_params returns nil when the param is absent;
  # the original called remote_namespace.empty? unguarded and raised
  # NoMethodError on nil.
  opts.merge!(:namespace => remote_namespace) unless remote_namespace.nil? || remote_namespace.empty?

  remote_namespace, remote_module_name, version = Repo::Remote::split_qualified_name(ret_non_null_request_params(:remote_module_name), opts)
  remote_params = remote_params_dtkn(:service_module, remote_namespace, remote_module_name, version)

  project = get_default_project()
  ServiceModule.delete_remote(project, remote_params, client_rsa_pub_key, force_delete)

  rest_ok_response
end
-
-
#
# Method will check new dependencies on repo manager and report missing dependencies.
# Response will return list of modules for given component.
#
def rest__resolve_pull_from_remote()
  rest_ok_response resolve_pull_from_remote(:service_module)
end

# Deletes an assembly template and returns the owning module's repo info.
def rest__delete_assembly_template()
  rest_ok_response Assembly::Template.delete_and_ret_module_repo_info(ret_assembly_template_idh())
end
-
-
1
# Applies committed repo-clone changes (commit sha + diff summary) to the
# model, honoring the internal_trigger / modification_type / force_parse flags.
def rest__update_model_from_clone()
  service_module = create_obj(:service_module_id)
  commit_sha = ret_non_null_request_params(:commit_sha)
  version = ret_version()
  diffs_summary = ret_diffs_summary()

  opts = ret_params_hash(:task_action)
  opts.merge!(:auto_update_module_refs => true) # TODO: might make this contingent
  opts.merge!(:do_not_raise => true) if ret_request_param_boolean(:internal_trigger)
  mod_type = ret_request_params(:modification_type)
  opts.merge!(:modification_type => mod_type.to_sym) if mod_type
  opts.merge!(:force_parse => true) if ret_request_param_boolean(:force_parse)

  rest_ok_response service_module.update_model_from_clone_changes?(commit_sha, diffs_summary, version, opts)
end
-
-
1
# Pins a component module used by this service module to a given version;
# responds with the info needed to update local clones.
def rest__set_component_module_version()
  service_module = create_obj(:service_module_id)
  component_module = create_obj(:component_module_id, ComponentModule)
  rest_ok_response service_module.set_component_module_version(component_module, ret_version())
end

def rest__info_git_remote()
  info_git_remote(create_obj(:service_module_id))
end

def rest__add_git_remote()
  add_git_remote(create_obj(:service_module_id))
end

def rest__remove_git_remote()
  remove_git_remote(create_obj(:service_module_id))
end
-
-
end
-
end
-
1
module XYZ
  class State_changeController < AuthController
    # Lists pending state changes for a target; when target_id is nil the
    # default target is used.
    def rest__list_pending_changes(target_id=nil)
      idh = target_idh_with_default(target_id)
      rest_ok_response StateChange.list_pending_changes(idh)
    end
  end
end
-
1
module DTK
-
1
class TargetController < AuthController
-
1
helper :target_helper
-
-
1
PROVIDER_PREFIX = 'provider'
-
1
PROVIDER_DELIMITER = ':::'
-
-
1
# Lists targets: subtype :instance (optionally filtered by :parent_id) or
# :template; any other subtype raises ErrorUsage.
def rest__list()
  subtype = ret_target_subtype()
  parent_id = ret_request_params(:parent_id)

  response =
    case subtype
    when :instance
      list_opts = (parent_id && !parent_id.empty?) ? {:filter => [:eq, :parent_id, parent_id]} : Hash.new
      Target::Instance.list(model_handle(), list_opts)
    when :template
      Target::Template.list(model_handle())
    else
      raise ErrorUsage.new("Illegal subtype param (#{subtype})")
    end
  rest_ok_response response
end
-
-
1
# YAML-encoded info for one target instance.
def rest__info()
  target = create_obj(:target_id, Target::Instance)
  rest_ok_response target.info(), :encode_into => :yaml
end

# Imports nodes into the target from client-supplied inventory data.
def rest__import_nodes()
  target = create_obj(:target_id, Target::Instance)
  # TODO: formatting to get right fields is done on client side now; should be done on server side
  # method Node::TargetRef:InventoryData.new can be removed or modified once that is done
  inventory_data = Node::TargetRef::Input::InventoryData.new(ret_non_null_request_params(:inventory_data))
  rest_ok_response Target::Instance.import_nodes(target, inventory_data)
end

def rest__install_agents()
  create_obj(:target_id).install_agents()
  rest_ok_response
end
-
-
1
# Creates a task installing agents on the target's unmanaged nodes;
# responds with a plain ok (no task) when there are none.
def rest__create_install_agents_task()
  target = create_obj(:target_id)

  unmanaged_nodes = target.get_objs(:cols => [:unmanaged_nodes]).map { |row| row[:node] }
  return rest_ok_response if unmanaged_nodes.empty?

  opts = Hash.new
  num_nodes = ret_request_params(:num_nodes)
  opts.merge!(:debug_num_nodes => num_nodes) if num_nodes

  task = Task.create_install_agents_task(target, unmanaged_nodes, opts)
  task.save!()

  rest_ok_response :task_id => task.id
end

# Status of the target's task, as :hash unless a :format param overrides it.
def rest__task_status()
  target_idh = create_obj(:target_id).id_handle()
  format = (ret_request_params(:format) || :hash).to_sym
  rest_ok_response Task::Status::Target.get_status(target_idh, :format => format)
end
-
-
# create target instance
# Creates an EC2 target instance (:ec2_classic or :ec2_vpc) under a provider.
def rest__create()
  provider = create_obj(:provider_id, Target::Template)
  iaas_properties = ret_non_null_request_params(:iaas_properties).inject(Hash.new) { |h, (k, v)| h.merge(k.to_sym => v) }
  target_type = (ret_request_params(:type) || :ec2_classic).to_sym
  opts = ret_params_hash(:target_name)
  project_idh = get_default_project().id_handle()

  # TODO: for legacy: can be removed when clients upgraded
  iaas_properties[:region] ||= ret_request_params(:region)

  unless [:ec2_classic, :ec2_vpc].include?(target_type)
    raise ErrorUsage.new("Target type '#{target_type}' is not supported")
  end
  Target::Instance.create_target_ec2(project_idh, provider, target_type, iaas_properties, opts)
  rest_ok_response
end
-
-
1
# Creates an iaas provider (target template) in the default project and
# responds with its id.
def rest__create_provider()
  iaas_type = ret_non_null_request_params(:iaas_type)
  provider_name = ret_non_null_request_params(:provider_name)
  iaas_properties = ret_request_params(:iaas_properties)
  params_hash = ret_params_hash(:description)
  # NOTE(review): `x || true` always evaluates to true, so the :no_bootstrap
  # request param is effectively ignored here — confirm whether this is
  # intentional (the bootstrap path below is commented out anyway).
  no_bootstrap = ret_request_param_boolean(:no_bootstrap) || true

  project_idh = get_default_project().id_handle()
  # setting :error_if_exists only if no bootstrap
  opts = {:raise_error_if_exists => no_bootstrap}
  provider = Target::Template.create_provider?(project_idh,iaas_type,provider_name,iaas_properties,params_hash,opts)
  response = {:provider_id => provider.id}

  # TODO: removing until provides for fact that need to know when ec2 whether vpc or classic
  # unless no_bootstrap
  #  # select_region could be nil
  #  created_targets_info = provider.create_bootstrap_targets?(project_idh,selected_region)
  #  response.merge!(:created_targets => created_targets_info)
  # end
  rest_ok_response response
end
-
-
1
# Deletes and destroys a target template (provider) or a target instance.
def rest__delete_and_destroy()
  type = (ret_request_params(:type) || :instance).to_sym # can be :instance or :template
  # TODO: stubbed now to have force being true; now only Target::Template.delete_and_destroy supports non force; so not passing in
  # force param to Target::Instance.delete_and_destroy
  force = true
  response =
    case type
    when :template
      provider = create_obj(:target_id, Target::Template)
      Target::Template.delete_and_destroy(provider, :force => force)
    when :instance
      Target::Instance.delete_and_destroy(create_obj(:target_id, Target::Instance))
    else
      raise ErrorUsage.new("Illegal type '#{type}'")
    end
  rest_ok_response response
end
-
-
1
# Updates the iaas properties of a target instance.
def rest__set_properties()
  target_instance = create_obj(:target_id, Target::Instance)
  Target::Instance.set_properties(target_instance, ret_request_params(:iaas_properties))
  rest_ok_response
end

# Makes the target instance the default target.
def rest__set_default()
  target_instance = create_obj(:target_id, Target::Instance)
  # TODO: stubbed; might make :update_workspace_target an option passed by client
  Target::Instance.set_default_target(target_instance, :update_workspace_target => true)
  rest_ok_response
end

# Info about one aspect of the target (detail level / workspace inclusion
# controlled by optional params).
def rest__info_about()
  target = create_obj(:target_id)
  about = ret_non_null_request_params(:about).to_sym
  rest_ok_response target.info_about(about, ret_params_hash(:detail_level, :include_workspace))
end
-
-
-
1
# External and internal/external component ports of the target.
def get_ports(id)
  port_list = create_object_from_id(id).get_ports("component_external","component_internal_external")
  {:data => port_list}
end

# Current (refreshed) status of the target's nodes.
def get_nodes_status(id)
  target = create_object_from_id(id)
  # pp [:node_config_changes,target.get_node_config_changes()]
  {:data => target.get_and_update_nodes_status()}
end

# View-only placeholder actions.
def edit
end

def display
end
-
-
1
# Renders the datacenter workspace view: registers the js templates, builds
# the view_space hash from the target's items and pushes it to the IDE via
# run_javascript. Returns an empty :data payload.
def load_vspace(target_id)
  target = id_handle(target_id,:datacenter).create_object()
  target_id = target.id()

  # TODO: how to retrieve fields from instance?
  target_hash = get_object_by_id(target_id,:datacenter)

  # TODO: revisit when cleaning up toolbar, plugins and user settings
  # tpl = R8Tpl::TemplateR8.new("workspace/notification_list",user_context())
  # tpl.set_js_tpl_name("notification_list")
  # tpl_info = tpl.render()
  # include_js_tpl(tpl_info[:src])
  include_js('plugins/search.cmdhandler')
  view_space = {
    :type => 'datacenter',
    :i18n => 'Environments',
    :object => target_hash
  }
  # v_space_obj = JSON.generate(view_space)
  # run_javascript("R8.Workspace.pushViewSpace(#{v_space_obj});")

  #--------Setup Toolbar for access each group from ACL's---------
  # add_js_exe("R8.Toolbar.init({node:'group-#{model_list[0][:id]}',tools:['quicksearch']});")
  user_has_toolbar_access = true
  user_group_tool_list = Array.new
  user_group_tool_list << 'quicksearch'
  toolbar_def = {:tools => user_group_tool_list}

  include_js('toolbar.quicksearch.r8')

  tpl_info_hash = Hash.new

  tpl = R8Tpl::TemplateR8.new("node_group/wspace_display",user_context())
  tpl.set_js_tpl_name("ng_wspace_display")
  tpl_info_hash[:node_group] = tpl.render()
  include_js_tpl(tpl_info_hash[:node_group][:src])

  tpl = R8Tpl::TemplateR8.new("node/wspace_display",user_context())
  tpl.set_js_tpl_name("node_wspace_display")
  tpl_info_hash[:node] = tpl.render()
  include_js_tpl(tpl_info_hash[:node][:src])

  tpl = R8Tpl::TemplateR8.new("datacenter/wspace_monitor_display",user_context())
  tpl.set_js_tpl_name("wspace_monitor_display")
  tpl_info_hash[:monitor] = tpl.render()
  include_js_tpl(tpl_info_hash[:monitor][:src])

  ##### ----------------- add in model info
  model_list = target.get_items()

  items = model_list.map do |object|
    object_id_sym = object.id.to_s.to_sym
    # BUGFIX: this line referenced `dc_hash`, which is never defined in this
    # method (NameError at runtime); the intended hash is `target_hash`
    # fetched above (get_view_items uses the identical pattern).
    ui = ((target_hash[:ui]||{})[:items]||{})[object_id_sym] || (object[:ui]||{})[target_id.to_s.to_sym]

    obj_tags = object[:display_name].split(',')
    model_name = object.model_name
    # items tagged "monitor" render with the monitor template
    type = (obj_tags.include?("monitor")) ? :monitor : model_name
    {
      :type => type.to_s,
      :model => model_name.to_s,
      :object => object,
      :toolbar_def => toolbar_def,
      :tpl_callback => tpl_info_hash[type][:template_callback],
      :ui => ui,
      :tags => obj_tags
    }
  end
  view_space[:items] = items
  view_space_json = JSON.generate(view_space)
  # run_javascript("R8.Workspace.pushViewSpace(#{view_space_json});")
  run_javascript("R8.IDE.pushViewSpace(#{view_space_json});")

  #---------------------------------------------

  {:data => ''}
end
-
-
1
# Builds the workspace view_space hash for a target — renders the js
# templates but (unlike load_vspace) does not push to the client; the hash
# is returned under :data.
def get_view_items(id)
  target = id_handle(id,:target).create_object()
  target_id = target.id()

  dc_hash = get_object_by_id(target_id,:target)

  view_space = {
    :type => 'datacenter',
    :i18n => 'Environments',
    :object => dc_hash
  }
  model_list = target.get_items()

  # render the per-model js templates once, keyed by item type
  tpl_info_hash = {}
  tpl = R8Tpl::TemplateR8.new("node_group/wspace_display",user_context())
  tpl.set_js_tpl_name("ng_wspace_display")
  tpl_info_hash[:node_group] = tpl.render()

  tpl = R8Tpl::TemplateR8.new("node/wspace_display_ide",user_context())
  tpl.set_js_tpl_name("node_wspace_display_ide")
  tpl_info_hash[:node] = tpl.render()

  tpl = R8Tpl::TemplateR8.new("datacenter/wspace_monitor_display",user_context())
  tpl.set_js_tpl_name("wspace_monitor_display")
  tpl_info_hash[:monitor] = tpl.render()

  items = model_list.map do |object|
    object_id_sym = object.id.to_s.to_sym
    # per-item ui prefs: datacenter-level override first, then the item's own
    ui = ((dc_hash[:ui]||{})[:items]||{})[object_id_sym] || (object[:ui]||{})[target_id.to_s.to_sym]

    obj_tags = object[:display_name].split(',')
    model_name = object.model_name
    # items tagged "monitor" render with the monitor template
    type = (obj_tags.include?("monitor")) ? :monitor : model_name
    {
      :type => type.to_s,
      :model => model_name.to_s,
      :object => object,
      # :toolbar_def => toolbar_def,
      :tpl_callback => tpl_info_hash[type][:template_callback],
      :ui => ui,
      :tags => obj_tags
    }
  end
  view_space[:items] = items
  # view_space_json = JSON.generate(view_space)
  # run_javascript("R8.Workspace.pushViewSpace(#{view_space_json});")

  return {:data=>view_space}
end
-
-
1
# No-op create action; responds with an empty hash.
def create()
  # TODO: Should we remove this method?
  return {}
end
-
-
1
# Adds a model item (node via binding-ruleset clone, anything else directly)
# to the target, records the item's UI placement under the datacenter's :ui
# hash, then redirects to either the caller-specified route or the default
# display page for the new item.
def add_item(id)
  # TODO: need to copy in avatar when hash["ui"] is non null
  target = id_handle(id).create_object()

  override_attrs = request.params["ui"] ? {:ui=>request.params["ui"]} : {}

  model_id_handle = id_handle(request.params["model_id"].to_i,request.params["model"].to_sym)
  if request.params["model"] == "node"
    # nodes are cloned from (or matched to) a node binding ruleset
    node_binding_rs = create_object_from_id(request.params["model_id"],:node_binding_ruleset)
    new_item_id = node_binding_rs.clone_or_match(target).get_id()
  else
    new_item_id = target.add_item(model_id_handle,override_attrs)
  end
  # TODO: how do we get field info from model instance?
  dc_hash = get_object_by_id(id,:datacenter)
  dc_ui = dc_hash[:ui].nil? ? {:items=>{}} : dc_hash[:ui]
  # TODO: cleanup later, right now ui req param indexed by dc id from old style
  ui_params = JSON.parse(request.params["ui"])
  # NOTE(review): JSON.parse yields string keys, so ui_params[id] only works
  # if id is already a string — confirm (the commented line used id.to_s).
  # dc_ui[:items][new_item_id.to_s.to_sym] = ui_params[id.to_s]
  dc_ui[:items][new_item_id.to_s.to_sym] = ui_params[id]
  # TODO: any way to update a model from its object once an instance is created?
  update_from_hash(id,{:ui=>dc_ui})

  # pp '++++++++++++++++++++++++++++++++++++++'
  # pp 'SHOULD HAVE UPDATED FROM HASH FOR TARGET OBJECT:'
  # pp dc_ui

  # TODO: clean this up,hack to update UI params for newly cloned object
  # update_from_hash(id,{:ui=>hash["ui"]})

  # hash["redirect"] ? redirect_route = "/xyz/#{hash["redirect"]}/#{id.to_s}" : redirect_route = "/xyz/#{model_name()}/display/#{id.to_s}"

  if request.params["model_redirect"]
    base_redirect = "/xyz/#{request.params["model_redirect"]}/#{request.params["action_redirect"]}"
    # a "*"-prefixed id_redirect means "redirect to the newly created item"
    redirect_id = request.params["id_redirect"].match(/^\*/) ? new_item_id.to_s : request.params["id_redirect"]
    redirect_route = "#{base_redirect}/#{redirect_id}"
    request_params = ''
    # forward any extra request params on the redirect query string
    expected_params = ['model_redirect','action_redirect','id_redirect','target_id','target_model_name']
    request.params.each do |name,value|
      if !expected_params.include?(name)
        request_params << '&' if request_params != ''
        request_params << "#{name}=#{value}"
      end
    end
    ajax_request? ? redirect_route += '.json' : nil
    # NOTE(review): URI.encode was removed in Ruby 3.0 — replace with
    # explicit escaping (e.g. URI.encode_www_form) when upgrading.
    redirect_route << URI.encode("?#{request_params}") if request_params != ''
  else
    redirect_route = "/xyz/#{model_name()}/display/#{new_item_id.to_s}"
    ajax_request? ? redirect_route += '.json' : nil
  end

  redirect redirect_route
end
-
-
1
# DEPRECATED: raises unconditionally before computing the link list.
def get_links(id)
  target = id_handle(id,:datacenter).create_object()
  item_list = JSON.parse(request.params["item_list"])
  # NOTE(review): reject's block returns the value of Log.error (or nil when
  # both keys are present), so well-formed entries are kept but malformed
  # ones are dropped only if Log.error returns truthy — confirm intent.
  item_list = item_list.reject do |x|
    Log.error("get links missing needed params") unless x["id"] and x["model"]
  end
  # TODO: move this call into underlying get_links call,
  item_list = item_list.map{|x|id_handle(x["id"].to_i,x["model"].to_sym)}
  # TODO: make get_links an instance method, should pull all links from children if item_list is []/nil
  raise Error.new("Target::get_ports_links is deprecated")
  # NOTE(review): everything below the raise is unreachable dead code.
  link_list = target.class.get_port_links(item_list,"component_external")
  return {'data'=>link_list}
end
-
-
1
# Returns the datacenter's "warning" violations, each tagged with :type.
def get_warnings(id)
  datacenter = get_object_by_id(id,:datacenter)
  notification_list = datacenter.get_violation_info("warning")
  notification_list.each { |notification| notification[:type] = "warning" }
  # DEBUG
  # pp [:warnings,notification_list]
  {:data => notification_list}
end
-
-
end
-
end
-
1
module XYZ
-
1
class TaskController < AuthController
-
1
helper :task_helper
-
-
1
# Status of a task (the most recently updated top-level task when :task_id
# is absent) at :summary (default) or finer detail.
def rest__status()
  task_id, detail_level = ret_request_params(:task_id, :detail_level)
  detail_level = (detail_level || :summary).to_sym
  unless task_id
    # TODO: use Task.get_top_level_most_recent_task(model_handle,filter=nil)
    most_recent = Task.get_top_level_tasks(model_handle).max_by { |t| t[:updated_at] }
    task_id = most_recent[:id]
  end
  opts = Task::Status::Opts.new
  if detail_level == :summary
    opts[:no_components] = true
    opts[:no_attributes] = true
  end

  task_structure = Task.get_hierarchical_structure(id_handle(task_id))
  rest_ok_response task_structure.status_hash_form(opts)
end
-
-
1
# Builds (and saves) a task from pending state changes. Scope may arrive as
# target_ids (TODO stub), a project_id, or default to all targets; only a
# single-target scope is currently supported.
def rest__create_task_from_pending_changes()
  scope_x = ret_request_params(:scope)||{}
  # TODO: put in check/error that there is no task created already, but not executed, that handles same changes

  # process raw scope
  scope =
    if scope_x["target_ids"]
      # TODO: stub
    elsif scope_x["project_id"]
      # NOTE(review): this filter shape differs from others in the file
      # ([:and, [:eq, col, val]]) — confirm the query DSL accepts a bare
      # column/value pair under :and.
      sp_hash = {
        :cols => [:id],
        :filter => [:and, :project_project_id, scope_x["project_id"].to_i]
      }
      target_ids = Model.get_objs(model_handle(:target),sp_hash).map{|r|r[:id]}
      {:target_ids => target_ids}
    else
      # TODO: stub if scope by node_id
      Log.info("node_id scope given (#{scope_x["node_id"]})") if scope_x["node_id"]
      target_ids = Model.get_objs(model_handle(:target),{:cols => [:id]}).map{|r|r[:id]}
      {:target_ids => target_ids}
    end
  # NOTE(review): these return an Error object as the response body instead
  # of raising it (the file's convention elsewhere is `raise Error.new`) —
  # confirm which is intended.
  return Error.new("Only treating scope by target ids") unless target_scope = scope[:target_ids]
  return Error.new("Only treating scope given by single target") unless target_scope.size == 1

  target_idh = id_handle(target_scope.first,:target)
  pending_changes = StateChange.flat_list_pending_changes(target_idh)

  if pending_changes.empty?
    rest_notok_response :code => :no_pending_changes
  else
    task = Task.create_from_pending_changes(target_idh,pending_changes)
    task.save!()
    rest_ok_response :task_id => task.id
  end
end
-
-
1
# Builds a workflow for the task and defers its execution.
def rest__execute()
  task_id = ret_non_null_request_params(:task_id)
  workflow = Workflow.create(Task.get_hierarchical_structure(id_handle(task_id)))
  Aux.stop_for_testing?(:converge) # TODO: for debugging
  workflow.defer_execution()
  rest_ok_response :task_id => task_id
end

# Cancels the given top-level task.
def rest__cancel_task()
  top_task_id = ret_non_null_request_params(:task_id)
  cancel_task(top_task_id)
  rest_ok_response :task_id => top_task_id
end
-
-
1
# Creates converge state changes for one node (when :node_id is given) or
# for every node attached to a datacenter.
def rest__create_converge_state_changes()
  node_id = ret_request_params(:node_id)
  node_idhs =
    if node_id
      [id_handle(node_id, :node)]
    else
      # means get set of nodes
      # TODO: stub is to get all in target
      sp_hash = {
        :cols => [:id, :display_name],
        :filter => [:neq, :datacenter_datacenter_id, nil]
      }
      Model.get_objs(model_handle(:node), sp_hash).map { |row| row.id_handle }
    end
  StateChange.create_converge_state_changes(node_idhs)
  rest_ok_response
end
-
-
### TODO temp for mocking
# NOTE(review): @@count is a class variable shared across the inheritance
# tree; acceptable for this throwaway mock helper, avoid in real code.
@@count = 0
# Dumps state_info to /tmp/saveN for later replay by debug_mock_replay.
def debug_mock_record(state_info)
  @@count += 1
  File.open("/tmp/save#{@@count.to_s}","w"){|f|f << JSON.pretty_generate(state_info)}
end
# Replays the next recorded state file from spec/task_mock_data, falling
# back to the newest file once the counter runs past the recorded set.
def debug_mock_replay()
  dir = File.expand_path('../spec/task_mock_data', File.dirname(__FILE__))
  Dir.chdir(dir) do
    save_files = Dir["*"]
    file = "save#{@@count.to_s}"
    file = save_files.sort.last unless save_files.include?(file)
    @@count += 1
    JSON.parse(File.open(file){|f|f.read})
  end
end
### end temp for mocking
-
-
# TODO: test stub
# Pretty-prints the hierarchical structure of a task (most recently updated
# top-level task when task_id is nil) and returns it as JSON content.
def pretty_print(task_id=nil)
  task_id ||= Task.get_top_level_tasks(model_handle).max_by { |t| t[:updated_at] }[:id]
  task_structure = Task.get_hierarchical_structure(id_handle(task_id))
  pp_hash = task_structure.pretty_print_hash()
  pp pp_hash
  {:content => JSON.generate(pp_hash)}
end
-
-
-
1
# Debug helper: prints the events of the most recent top-level task.
# NOTE(review): the else branch raises whenever task_id IS supplied —
# explicit-id lookup is a not-yet-implemented stub (mirrors get_logs below).
def get_events(task_id=nil)
  unless task_id
    tasks = Task.get_top_level_tasks(model_handle).sort{|a,b| b[:updated_at] <=> a[:updated_at]}
    task_id = tasks.first[:id]
  else
    raise Error.new("not implemented yet get_logs with task id given")
  end
  events = create_object_from_id(task_id).get_events
  pp events
  {:content => {}}
end
-
-
# TODO: templk hack
# Polls get_logs for the task, retrying while any node still reports
# :no_data (the first call may race the background fetch from the nodes),
# then flattens the per-node hash into an array of node entries.
def rest__get_logs()
  task_id = ret_request_params(:task_id)
  # first time call can have no data because it is going to db and launching background call to nodes
  num_tries = 1
  max_tries = 5
  done = false
  data = nil
  # BUGFIX: the original condition was `while done or num_tries < max_tries`,
  # which spins forever once done becomes true; loop only while NOT done and
  # retries remain.
  while !done && num_tries < max_tries
    data = get_logs(task_id)[:data]
    if data.values.find{|node_log| node_log[:summary] and node_log[:summary][:type] == "no_data"}
      sleep 0.5
      num_tries += 1
    else
      done = true
    end
  end
  data_reformulated = Array.new
  data.each do |node_id,info|
    # TODO: :complete is misleading
    info.delete(:complete)
    data_reformulated << info.merge(:node_id => node_id)
  end
  rest_ok_response data_reformulated
end
-
-
1
# Fetches raw node logs for the most recent task (explicit task_id is a
# not-yet-implemented stub that raises), parses them per node, and returns
# {:data => hash-form-logs}. An optional "node_id" request param restricts
# the result to one node.
def get_logs(task_id=nil)
  node_id = request.params["node_id"]
  node_id = node_id && node_id.to_i

  unless task_id
    task = get_most_recent_task()
  else
    raise Error.new("not implemented yet get_logs with task id given")
  end

  assoc_nodes = ((task && task.get_associated_nodes())||[]).select{|n|node_id.nil? or n[:id] == node_id}
  ndx_node_names = assoc_nodes.inject({}){|h,n|h.merge(n[:id] => n[:display_name])}
  parsed_log = nil
  found_error = nil

  # mcollective mode pulls real log content; otherwise use mocked chef logs
  if R8::EnvironmentConfig::CommandAndControlMode == "mcollective"
    logs_info = task ? TaskLog.get_and_update_logs_content(task,assoc_nodes,:top_task_id => task.id()) : {}
  else
    logs_info = get_logs_mock(assoc_nodes).inject({}) do |h,(k,v)|
      h.merge(k => {:log => v, :type => "chef"})
    end
  end

  #### parse the logs
  # NOTE(review): the block param node_id shadows the outer node_id local.
  parsed_logs = {:no_data => Array.new,:ok => Array.new, :error => Array.new}
  logs_info.each do |node_id,log_info|
    log = log_info[:log]
    node_name = ndx_node_names[node_id]
    unless log
      parsed_logs[:no_data] << {:node_id => node_id,:node_name => node_name}
      next
    end
    log_type = log_info[:type].to_sym
    pl = ParseLog.parse(log_type,log)
    type = pl.find{|seg|seg.type == :error} ? :error : :ok
    parsed_logs[type] << {:node_id => node_id, :node_name => node_name,:parsed_log => pl}
  end
  ### end parse logs
  # put log in hash/array form
  hash_form = logs_in_hash_form(parsed_logs,node_id.nil?)

  #### finding file id TODO: this should be pushed to lower level
  hash_form.each do |k,v|
    el = (v[:log_segments]||[]).last || {}
    if efr=el[:error_file_ref]
      # NOTE(review): ret_file_asset can return nil (no matching asset), in
      # which case file[:id] raises — confirm whether a guard is needed.
      file = ret_file_asset(efr[:file_name],efr[:type],efr[:cookbook])
      efr[:file_id] = file[:id]
    end
  end

  # TODO: temp hack so notice and err show for puppet logs
  if (logs_info.values.first||{})[:type] == "puppet"
    hash_form.each_value do |v|
      (v[:log_segments]||[]).each do |seg|
        if [:notice,:error].include?(seg[:type])
          seg[:type] = :debug
        end
      end
    end
  end

  {:data => hash_form}
end
-
-
1
# Looks up the file_asset row referenced by an error log entry, matching on
# derived path and owning cookbook repo; nil when no path can be derived or
# no cookbook was given.
def ret_file_asset(file_name, type, cookbook)
  path = ret_file_asset_path(file_name, type)
  return nil unless path and cookbook
  sp_hash = {
    :filter => [:eq, :path, path],
    :cols => [:id, :path, :implementation_info]
  }
  mh = model_handle.createMH(:file_asset)
  Model.get_objects_from_sp_hash(mh, sp_hash).find { |asset| asset[:implementation][:repo] == cookbook }
end
-
-
1
def ret_file_asset_path(file_name,type)
-
return nil unless file_name
-
case type
-
when :template
-
# TODO: stub; since does not handle case where multiple versions
-
"templates/default/#{file_name}"
-
when :recipe
-
"recipes/#{file_name}"
-
end
-
end
-
-
1
def logs_in_hash_form(parsed_logs,is_single_node)
-
ret = Hash.new
-
parsed_logs.each do |type,logs|
-
logs.each do |log_info|
-
node_id = log_info[:node_id]
-
node_name = log_info[:node_name]
-
if type == :no_data
-
ret[node_id] = {
-
:node_name => node_name,
-
:summary => {
-
:type => :no_data
-
}
-
}
-
else
-
# TODO: change hash form so do not have to reformulate
-
pl = log_info[:parsed_log].hash_form
-
log_segments = pl[:log_segments]
-
-
# TODO: see what other chars that need to be removed; once finalize move this to under hash_form
-
log_segments = log_segments.map do |x|
-
line = x[:line] && x[:line].gsub(/["]/,"")
-
x.merge(:line => line)
-
end
-
-
error = nil
-
if (log_segments.last||{})[:type] == :error
-
error = log_segments.last
-
pp [:log_error,error]
-
# TODO: this looks like error; should be log_segments = log_segments[0..log_segments.size-2]
-
log_segments = log_segments[1..log_segments.size-1]
-
end
-
summary = error ? error : {:type => :ok}
-
ret[node_id] = {
-
:node_name => node_name,
-
:log_segments => log_segments,
-
:complete => pl[:complete],
-
:summary => summary
-
}
-
end
-
end
-
end
-
is_single_node ? ret.values.first : ret
-
end
-
-
1
# Fetches, parses and renders the logs of the most recent top-level task.
#
# level   - "summary"/"info"/"debug"/"error_detail" (the JS client may send the
#           literal string "undefined", which is coerced to "summary").
# task_id - must currently be nil; passing an id raises (not implemented).
#
# Returns {:content => <rendered template>}.
def get_logs_test(level="info",task_id=nil)

  # task_id is nil means get most recent task
  # TODO: hack
  # level = "info" if level == "undefined"
  level = "summary" if level == "undefined"
  level = level.to_sym

  # Pick the most recently updated top-level task when no id is given.
  unless task_id
    tasks = Task.get_top_level_tasks(model_handle).sort{|a,b| b[:updated_at] <=> a[:updated_at]}
    task = tasks.first
  else
    raise Error.new("not implemented yet get_logs with task id given")
  end
  assoc_nodes = (task && task.get_associated_nodes())||[]
  # Index node display names by node id for labeling below.
  ndx_node_names = assoc_nodes.inject({}){|h,n|h.merge(n[:id] => n[:display_name])}
  parsed_log = nil
  found_error = nil

  # Fetch real logs via mcollective, otherwise fall back to canned mock logs.
  # if R8::Config[:command_and_control][:node_config][:type] == "mcollective"
  if R8::EnvironmentConfig::CommandAndControlMode == "mcollective"
    # TODO: do cases lower level
    # logs = task ? CommandAndControl.get_logs(task,assoc_nodes) : []
    logs_info = task ? TaskLog.get_and_update_logs_content(task,assoc_nodes,:top_task_id => task.id()) : {}
  else
    logs_info = get_logs_mock(assoc_nodes).inject({}) do |h,(k,v)|
      h.merge(k => {:log => v, :type => "chef"})
    end
  end

  #### parse the logs, bucketing each node as :no_data, :error or :ok
  parsed_logs = {:no_data => Array.new,:ok => Array.new, :error => Array.new}
  logs_info.each do |node_id,log_info|
    log = log_info[:log]
    node_name = ndx_node_names[node_id]
    pp "log for node #{node_name} (id=#{node_id.to_s})"
    unless log
      pp "no log data"
      parsed_logs[:no_data] << {:node_id => node_id,:node_name => node_name}
      next
    end
    log_type = log_info[:type].to_sym
    pl = ParseLog.parse(log_type,log)
    ##STDOUT << pl.pp_form_summary
    # File.open("/tmp/raw#{node_id.to_s}.txt","w"){|f|log.each{|l|f << l+"\n"}}
    ##pp [:file_asset_if_error,pl.ret_file_asset_if_error(model_handle)]
    ##STDOUT << "----------------\n"
    # TODO: hack where find error node and if no error node first node
    type = pl.find{|seg|seg.type == :error} ? :error : :ok
    parsed_logs[type] << {:node_id => node_id, :node_name => node_name,:parsed_log => pl}
  end
  ### end parse logs

  # Choose the view: nothing parsed -> :simple; summary level shows either the
  # simple view or error detail depending on whether errors were found.
  view_type =
    if no_results?(parsed_logs) then :simple
    elsif level == :summary then parsed_logs[:error].empty? ? :simple : :error_detail
    else level
    end
  tpl = find_template_for_view_type(view_type,parsed_logs)
  {:content => tpl.render()}
end
-
1
private

# Maps a log view type to the template path used to render it; :debug and
# :info share the full chef log view, while :simple and :error_detail use
# trimmed-down variants. Frozen so the lookup table cannot be mutated.
ChefLogView = {
  :debug => "task/chef_log_view",
  :info => "task/chef_log_view",
  :simple => "task/chef_log_view_simple",
  :error_detail => "task/chef_log_view_error_detail"
}.freeze
-
1
# True when every bucket in parsed_logs is empty, i.e. nothing was parsed.
def no_results?(parsed_logs)
  parsed_logs.values.none? { |entries| entries.size > 0 }
end
-
-
1
# Yields (type, node label, parsed log) for every entry, visiting the buckets
# in the fixed order :no_data, :ok, :error. The label is "name (id=N)".
def each_parsed_log(parsed_logs, &block)
  [:no_data, :ok, :error].each do |category|
    parsed_logs[category].each do |entry|
      label = "#{entry[:node_name]} (id=#{entry[:node_id].to_s})"
      yield(category, label, entry[:parsed_log])
    end
  end
end
-
1
# Yields (node label, parsed log) for each entry in the :error bucket only.
def each_error_parsed_log(parsed_logs, &block)
  parsed_logs[:error].each do |entry|
    label = "#{entry[:node_name]} (id=#{entry[:node_id].to_s})"
    yield(label, entry[:parsed_log])
  end
end
-
-
1
# Builds the template object for the given view type and populates its
# variables from parsed_logs:
#   :simple       -> :msgs (either ["no results"] or the textual summary)
#   :debug/:info  -> :parsed_logs (per-node segment hashes; :debug also
#                    includes :debug-typed segments)
#   :error_detail -> :errors (error detail/lines per failing node)
# Returns the assigned-but-unrendered R8Tpl::TemplateR8.
def find_template_for_view_type(view_type,parsed_logs)
  ret = R8Tpl::TemplateR8.new(ChefLogView[view_type],user_context())
  case view_type
  when :simple
    msgs = no_results?(parsed_logs) ? ["no results"] : summary(parsed_logs)
    ret.assign(:msgs,msgs)
  when :debug, :info
    pls = Array.new
    # :debug view includes both :info and :debug segments; :info only :info.
    incl = view_type == :debug ? [:info,:debug] : [:info]
    each_parsed_log(parsed_logs) do |type,node_info,parsed_log|
      segments = (parsed_log||[]).select{|s|incl.include?(s.type)}.map{|s|s.hash_form()}
      pls << {:type => type,:node_info => node_info,:segments => segments}
    end
    ret.assign(:parsed_logs,pls)
  when :error_detail
    # just showing error cases
    errors = Array.new
    each_error_parsed_log(parsed_logs) do |node_info,parsed_log|
      hash_form = parsed_log.error_segment.hash_form()
      pp [:error_info,hash_form]
      err = [:error_detail,:error_lines].inject(:node_info => node_info) do |h,val|
        h.merge(val => hash_form[val])
      end
      errors << err
    end
    ret.assign(:errors,errors)
  end
  ret
end
-
-
1
# Produces a human-readable, line-per-entry summary of all parsed logs:
# a banner with the node label, a one-line status, and a separator per node.
def summary(parsed_logs)
  lines = Array.new
  each_parsed_log(parsed_logs) do |type, node_info, parsed_log|
    status =
      if type == :no_data
        "no_data"
      elsif parsed_log.is_complete?()
        type == :error ? "complete with error" : "complete and ok"
      elsif type == :error
        "incomplete with error"
      else
        "incomplete no error yet"
      end
    lines << "--------------- #{node_info} ----------------------"
    lines << status
    lines << "-------------------------------------------------------"
  end
  lines
end
-
-
1
# Builds {node_id => [chomped log lines]} from the canned sample files in
# SampleSets, one sample per associated node, stopping once the samples are
# exhausted. Used when the command-and-control backend is mocked.
def get_logs_mock(assoc_nodes)
  ret = Hash.new
  i = 0
  assoc_nodes.each do |node|
    pp "log for node_id #{node[:id].to_s}"
    file = File.expand_path(SampleSets[i], File.dirname(__FILE__))
    raw_log = File.open(file){|f|f.read}
    log = Array.new
    raw_log.each_line{|l|log << l.chomp}
    ret[node[:id]] = log
    # BUG FIX: was `i += i`, which left i pinned at 0, so every node re-read
    # the first sample and the break below could never fire.
    i += 1
    break if i >= SampleSets.size
  end
  ret
end
-
1
# Canned log fixtures used by get_logs_mock, resolved relative to this
# file's directory. Frozen: the list is read-only lookup data.
SampleSets = ["temp/error_example1.raw.txt"].freeze
-
end
-
end
-
1
module DTK
  # REST controller for test modules. Each rest__* action is a thin wrapper:
  # it builds the target object from request params (via create_obj and the
  # ret_* param helpers), delegates to TestModule / shared helper methods,
  # and wraps the result in rest_ok_response.
  class Test_moduleController < AuthController
    helper :module_helper
    helper :remotes_helper

    # Generates the module DSL and echoes the generated content to stdout;
    # responds with a bare ok.
    def rest__test_generate_dsl()
      test_module = create_obj(:test_module_id)
      dsl_created_info = test_module.test_generate_dsl()
      STDOUT << dsl_created_info[:content] << "\n"
      rest_ok_response
    end

    #### create and delete actions ###
    # Creates a new test module in the default project, optionally under an
    # explicit namespace; responds with the new module's repo info.
    def rest__create()
      module_name = ret_non_null_request_params(:module_name)
      namespace = ret_request_param_module_namespace?()
      project = get_default_project()
      version = nil #TODO: stub

      opts_local_params = (namespace ? {:namespace=>namespace} : {})
      local_params = local_params(:test_module, module_name, opts_local_params)

      opts_create_mod = Opts.new(
        :config_agent_type => ret_config_agent_type()
      )
      module_repo_info = TestModule.create_module(project, local_params, opts_create_mod)[:module_repo_info]
      rest_ok_response module_repo_info
    end

    # Imports module content from the repo right after the initial create
    # (identified by repo_id + commit_sha); optionally scaffolds a DSL file
    # when none exists.
    def rest__update_from_initial_create()
      test_module = create_obj(:test_module_id)
      repo_id,commit_sha = ret_non_null_request_params(:repo_id,:commit_sha)
      repo_idh = id_handle(repo_id,:repo)
      version = ret_version()
      scaffold = ret_request_params(:scaffold_if_no_dsl)
      opts = {:scaffold_if_no_dsl => scaffold, :do_not_raise => true, :process_provider_specific_dependencies => true}
      rest_ok_response test_module.import_from_file(commit_sha,repo_idh,version,opts)
    end

    # Syncs the server-side model with changes committed to the module's
    # workspace clone; :internal_trigger suppresses raising, :force_parse
    # forces a DSL re-parse.
    def rest__update_model_from_clone()
      test_module = create_obj(:test_module_id)
      commit_sha = ret_non_null_request_params(:commit_sha)
      version = ret_version()
      diffs_summary = ret_diffs_summary()
      opts = Hash.new
      if ret_request_param_boolean(:internal_trigger)
        opts.merge!(:do_not_raise => true)
      end
      if ret_request_param_boolean(:force_parse)
        opts.merge!(:force_parse=> true)
      end
      dsl_created_info = test_module.update_model_from_clone_changes?(commit_sha,diffs_summary,version,opts)
      rest_ok_response dsl_created_info
    end

    # Deletes the entire module object.
    def rest__delete()
      test_module = create_obj(:test_module_id)
      module_info = test_module.delete_object()
      rest_ok_response module_info
    end

    # Deletes a single version of the module.
    def rest__delete_version()
      test_module = create_obj(:test_module_id)
      version = ret_version()
      module_info = test_module.delete_version(version)
      rest_ok_response module_info
    end

    #### end: create and delete actions ###

    #### list and info actions ###
    # Lists test modules in the default project, filtered by namespace;
    # when :diff is requested the response datatype becomes :module_diff.
    def rest__list()
      diff = ret_request_params(:diff)
      project = get_default_project()
      datatype = :module
      remote_repo_base = ret_remote_repo_base()

      opts = Opts.new(:project_idh => project.id_handle())
      if detail = ret_request_params(:detail_to_include)
        opts.merge!(:detail_to_include => detail.map{|r|r.to_sym})
      end

      opts.merge!(:remote_repo_base => remote_repo_base, :diff => diff)
      datatype = :module_diff if diff

      rest_ok_response filter_by_namespace(TestModule.list(opts)), :datatype => datatype
    end

    # Returns workspace branch info for the (optionally versioned) module.
    def rest__get_workspace_branch_info()
      test_module = create_obj(:test_module_id)
      version = ret_version()
      rest_ok_response test_module.get_workspace_branch_info(version)
    end

    # Returns module info; the module id param is optional.
    def rest__info()
      module_id = ret_request_param_id_optional(:test_module_id, ::DTK::TestModule)
      project = get_default_project()
      opts = Opts.new(:project_idh => project.id_handle())
      rest_ok_response TestModule.info(model_handle(), module_id, opts)
    end

    # Lists diffs between the local module and its remote counterpart.
    def rest__list_remote_diffs()
      test_module = create_obj(:test_module_id)
      version = nil
      rest_ok_response test_module.list_remote_diffs(version)
    end

    #
    # Method will check new dependencies on repo manager and report missing dependencies.
    # Response will return list of modules for given component.
    #
    def rest__resolve_pull_from_remote()
      rest_ok_response resolve_pull_from_remote(:test_module)
    end


    # Pulls module content from the remote (dtkn) repo.
    def rest__pull_from_remote()
      rest_ok_response pull_from_remote_helper(TestModule)
    end

    # Changes permissions of the module on the remote repo manager.
    def rest__remote_chmod()
      response = chmod_from_remote_helper()
      rest_ok_response(response)
    end

    # Changes ownership of the module on the remote repo manager.
    def rest__remote_chown()
      chown_from_remote_helper()
      rest_ok_response
    end

    # Confirms (and reports consequences of) making the module public.
    def rest__confirm_make_public()
      rest_ok_response confirm_make_public_helper()
    end

    # Adds/removes collaborators for the module on the remote repo manager.
    def rest__remote_collaboration()
      collaboration_from_remote_helper()
      rest_ok_response
    end

    # Lists the module's collaborators on the remote repo manager.
    def rest__list_remote_collaboration()
      response = list_collaboration_from_remote_helper()
      rest_ok_response response
    end

    # Lists both local and remote versions of the module; the client's RSA
    # public key authorizes the remote lookup.
    def rest__versions()
      test_module = create_obj(:test_module_id)
      client_rsa_pub_key = ret_request_params(:rsa_pub_key)
      project = get_default_project()
      opts = Opts.new(:project_idh => project.id_handle())

      rest_ok_response test_module.local_and_remote_versions(client_rsa_pub_key, opts)
    end

    # Returns detail about one aspect of the module; :about must be one of
    # AboutEnum or a BadParamValue usage error is raised.
    def rest__info_about()
      test_module = create_obj(:test_module_id)
      about = ret_non_null_request_params(:about).to_sym
      component_template_id = ret_request_params(:component_template_id)
      unless AboutEnum.include?(about)
        raise ErrorUsage::BadParamValue.new(:about,AboutEnum)
      end
      rest_ok_response test_module.info_about(about, component_template_id)
    end

    # Valid values for the :about param of rest__info_about.
    AboutEnum = [:components, :attributes, :instances]

    #### end: list and info actions ###

    #### actions to interact with remote repos ###
    # TODO: rename; this is just called by install; import ops call create route
    def rest__import()
      rest_ok_response install_from_dtkn_helper(:test_module)
    end

    # TODO: rename; this is just called by publish
    def rest__export()
      test_module = create_obj(:test_module_id)
      rest_ok_response publish_to_dtkn_helper(test_module)
    end


    # this should be called when the module is linked, but the specfic version is not
    def rest__import_version()
      test_module = create_obj(:test_module_id)
      remote_repo = ret_remote_repo()
      version = ret_version()
      rest_ok_response test_module.import_version(remote_repo,version)
    end

    # TODO: ModuleBranch::Location: harmonize this signature with one for service module
    # Deletes the module on the remote repo manager (optionally forced).
    def rest__delete_remote()
      remote_module_name = ret_non_null_request_params(:remote_module_name)
      remote_namespace = ret_request_params(:remote_module_namespace)
      force_delete = ret_request_param_boolean(:force_delete)

      remote_params = remote_params_dtkn(:test_module,remote_namespace,remote_module_name)
      client_rsa_pub_key = ret_request_params(:rsa_pub_key)

      project = get_default_project()
      TestModule.delete_remote(project, remote_params, client_rsa_pub_key, force_delete)
      rest_ok_response
    end

    # Lists modules available on the remote repo manager, namespace-filtered.
    def rest__list_remote()
      test_modules = TestModule.list_remotes(model_handle, ret_request_params(:rsa_pub_key))
      rest_ok_response filter_by_namespace(test_modules), :datatype => :module_remote
    end

    # get remote_module_info; throws an access rights usage error if user does not have access
    def rest__get_remote_module_info()
      test_module = create_obj(:test_module_id)
      rest_ok_response get_remote_module_info_helper(test_module)
    end

    #### end: actions to interact with remote repo ###

    #### actions to manage workspace

    # Creates a new version branch of the module.
    def rest__create_new_version()
      test_module = create_obj(:test_module_id)
      version = ret_version()

      test_module.create_new_version(version)
      rest_ok_response
    end

    # Upgrades the module's DSL to the requested integer dsl_version (json format).
    def rest__create_new_dsl_version()
      test_module = create_obj(:test_module_id)
      dsl_version = ret_non_null_request_params(:dsl_version).to_i
      module_version = ret_version()
      format = :json
      test_module.create_new_dsl_version(dsl_version,format,module_version)
      rest_ok_response
    end

    #### end: actions to manage workspace and promote changes from workspace to library ###

    # Pushes the module's repo to a mirror host.
    # NOTE(review): unlike the other actions this does not wrap its result in
    # rest_ok_response — confirm whether that is intentional.
    def rest__push_to_mirror()
      test_module = create_obj(:test_module_id)
      mirror_host = ret_non_null_request_params(:mirror_host)
      test_module.push_to_mirror(mirror_host)
    end

    # Shows the module's configured git remotes (delegates to shared helper).
    def rest__info_git_remote()
      test_module = create_obj(:test_module_id)
      info_git_remote(test_module)
    end

    # Adds a git remote to the module (delegates to shared helper).
    def rest__add_git_remote()
      test_module = create_obj(:test_module_id)
      add_git_remote(test_module)
    end

    # Removes a git remote from the module (delegates to shared helper).
    def rest__remove_git_remote()
      test_module = create_obj(:test_module_id)
      remove_git_remote(test_module)
    end

  end
end
-
1
require 'base64'
-
-
1
module XYZ
  # Handles user authentication (login/logout), registration, profile
  # editing and per-user settings updates.
  class UserController < Controller

    # Static value used below as BOTH the key and the salt for AESCrypt when
    # encrypting the session cookie payload.
    # NOTE(review): a hard-coded, source-committed key/salt is a security
    # liability — consider moving this to deployment configuration.
    ENCRYPTION_SALT = '397dedcf120682329a34a00a9bc768dfdf34062c'

    def index
    end

    # Renders the login page template.
    def login()
      action_name = "login"
      tpl = R8Tpl::TemplateR8.new("#{model_name()}/#{action_name}",user_context())
      tpl.assign(:_app,app_common())
      return {:content => tpl.render()}
    end

    # Logs the user out and redirects to the configured login path.
    def logout()
      user_logout
      redirect R8::Config[:login][:path]
    end

    # Logs the user out; REST-style variant that returns empty content
    # instead of redirecting.
    def process_logout()
      user_logout
      return {:content => nil}
    end

    # Authenticates the user from explicit params or the request, records the
    # session and (unless disabled) sets an encrypted "dtk-user-info" cookie.
    # Returns empty content for REST requests, otherwise redirects.
    def process_login(explicit_hash=nil)
      hash = explicit_hash || request.params.dup

      # NOTE(review): an earlier comment here claimed no hashing is needed at
      # this stage (because User#authenticate hashes), yet the password IS run
      # through DataEncryption.hash_it below — confirm which side is expected
      # to hash, since double-hashing would break authentication.
      cred = { :username => hash["username"], :password => DataEncryption.hash_it(hash["password"]), :c => ret_session_context_id(), :access_time => Time.now()}
      begin
        login_response = user_login(cred)
      rescue ::Sequel::DatabaseDisconnectError, ::Sequel::DatabaseConnectionError => e
        # DB unreachable: answer 403 (presumably respond halts the request —
        # verify, otherwise execution falls through with login_response nil).
        respond(e, 403)
      end

      current_session = CurrentSession.new
      current_session.set_user_object(user_object())

      # expire time set, we use Innate session for this
      session.store(:last_ts, Time.now.to_i)

      # cookie payload: "<user id>_<expiry epoch>_<context id>", expires in 3 hours
      cookie_expire_time = (Time.now + 3*3600)
      encrypt_info = ::AESCrypt.encrypt("#{user_object()[:id]}_#{cookie_expire_time.to_i}_#{user_object()[:c]}", ENCRYPTION_SALT, ENCRYPTION_SALT)

      unless R8::Config[:session][:cookie][:disabled]
        # set session cookie, we set both expire time and user id in it
        response.set_cookie(
          "dtk-user-info",
          :value => Base64.encode64(encrypt_info),
          :expires => cookie_expire_time
        )
      end

      if login_response
        rest_request? ? {:content => nil} : redirect(hash["redirect"]||R8::Config[:login][:redirect])
      else
        auth_violation_response()
      end
    end

    # Renders the registration page template.
    def register()
      action_name = "register"
      tpl = R8Tpl::TemplateR8.new("#{model_name()}/#{action_name}",user_context())
      tpl.assign(:_app,app_common())
      return {:content => tpl.render()}
    end

    # Persists registration info, mapping "username" onto the "name" field.
    def save_register_info(explicit_hash=nil)
      # TODO: stub; must make sure that user not created already
      hash = explicit_hash || request.params.dup
      save({"name" => hash["username"]}.merge(hash))
    end

    # TODO: why does it bomb when id field is added? should be optional
    # Renders the user-edit form into the panel named by the request's panel_id.
    def edit
      tpl = R8Tpl::TemplateR8.new("user/edit",user_context())
      tpl.assign(:_app,app_common())
      panel_id = request.params['panel_id']

      include_js('plugins/user.component')
      # run_javascript('setTimeout(initUserForm,500);')
      run_javascript('R8.UserComponent.init();')

      return {
        :content=> tpl.render(),
        :panel=>panel_id
      }
    end

    # Updates the current user's settings from the JSON "settings" param.
    # The id argument is currently unused; the user comes from the session.
    def update_settings(id)
      settings = JSON.parse(request.params["settings"])

      pp 'GOING TO UPDATE USER SETTINGS------------------'
      pp settings
      unless settings.empty?
        user = CurrentSession.new.get_user_object()
        user.update_settings(settings) if user
      end
      return {:data=>{}}
    end

  end
end
-
1
module XYZ
  # Debug-only controller: dumps incoming viewspace position updates.
  class ViewspaceController < AuthController

    # Logs the request params for an item-position update and returns an
    # empty hash; nothing is persisted here (persistence lives in
    # WorkspaceController#update_pos).
    def update_pos(id)
      pp 'Updating items positions in viewspace:'
      # pp request.params('items')
      pp request.params
      p '==================================================='
      return {}
    end
  end
end
-
-
1
module XYZ
-
1
class WorkspaceController < AuthController
-
1
helper :i18n_string_mapping
-
-
1
    # Builds the IDE workspace page: loads all projects with their target and
    # module trees, registers the JS templates and plugin scripts the IDE
    # needs, boots the client via R8.User.init/R8.IDE.init, and renders into
    # the 'project_panel'.
    # NOTE(review): the :content rendered at the end is whichever template was
    # assigned to tpl LAST (assembly/library_search) — confirm that is intended.
    def index()
      projects = Project.get_all(model_handle(:project))
      pp [:projects,projects]

      # Decorate each project with its trees and a :name alias.
      projects.each_with_index { |p,i|
        projects[i][:tree] = {}
        projects[i][:tree][:targets] = p.get_target_tree()
        projects[i][:tree][:implementations] = p.get_module_tree(:include_file_assets => true)
        projects[i][:name] = projects[i][:display_name]
      }
      tpl = R8Tpl::TemplateR8.new("ide/project_tree_leaf",user_context())
      tpl.set_js_tpl_name("project_tree_leaf")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("ide/l_panel",user_context())
      tpl.set_js_tpl_name("l_panel")
      # tpl = R8Tpl::TemplateR8.new("ide/panel_frame",user_context())
      # tpl.set_js_tpl_name("ide_panel_frame")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("ide/editor_panel",user_context())
      tpl.set_js_tpl_name("editor_panel")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      #==========================
      # Include target specific js that will be needed
      # TODO: move out of here eventually
      tpl_info_hash = Hash.new

      tpl = R8Tpl::TemplateR8.new("node_group/wspace_display",user_context())
      tpl.set_js_tpl_name("ng_wspace_display")
      tpl_info_hash[:node_group] = tpl.render()
      include_js_tpl(tpl_info_hash[:node_group][:src])

      tpl = R8Tpl::TemplateR8.new("node/wspace_display_ide",user_context())
      tpl.set_js_tpl_name("node_wspace_display_ide")
      tpl_info_hash[:node] = tpl.render()
      include_js_tpl(tpl_info_hash[:node][:src])

      tpl = R8Tpl::TemplateR8.new("datacenter/wspace_monitor_display",user_context())
      tpl.set_js_tpl_name("wspace_monitor_display")
      tpl_info_hash[:monitor] = tpl.render()
      include_js_tpl(tpl_info_hash[:monitor][:src])

      tpl = R8Tpl::TemplateR8.new("workspace/notification_list_ide",user_context())
      tpl.set_js_tpl_name("notification_list_ide")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("component/library_search",user_context())
      tpl.set_js_tpl_name("component_library_search")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("node/library_search",user_context())
      tpl.set_js_tpl_name("node_library_search")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

      tpl = R8Tpl::TemplateR8.new("assembly/library_search",user_context())
      tpl.set_js_tpl_name("assembly_library_search")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])
      #==========================

      # include_js('plugins/search.cmdhandler2')
      include_js('plugins/r8.cmdbar.assemblies')
      include_js('plugins/r8.cmdbar.components')
      include_js('plugins/r8.cmdbar.nodes')
      include_js('plugins/r8.cmdbar.tasks')

      projects_json = JSON.generate(projects)
      # TODO: figure out why this user init isnt firing inside of bundle and return
      # DEBUG
      run_javascript("R8.User.init();")
      run_javascript("R8.IDE.init(#{projects_json});")

      # run_javascript("R8.IDE.addProjects(#{projects_json});")

      # tpl = R8Tpl::TemplateR8.new("ide/test_tree2",user_context())
      # run_javascript("R8.IDE.testTree();")

      return {:content=>tpl.render(),:panel=>'project_panel'}

      # return {:content=>''}
    end
-
-
# TODO: move to viewspace controller
-
1
    # Persists new UI positions for the items of workspace ws_id. The request
    # carries a JSON "item_list" of {item_id => {"model" =>, "pos" => {...}}};
    # items are grouped by model type, filtered to those actually parented by
    # the workspace (treated as a datacenter), and each surviving item's
    # :ui hash gains {ws_id => {:left, :top}} (numeric pixels, units stripped).
    # Returns {}.
    def update_pos(ws_id)
      items_to_save = JSON.parse(request.params["item_list"])

      return {} if items_to_save.empty?

      # TODO: patch that maps nil model_name to node_group
      items_to_save.values.each{|item|item["model"] ||= "node_group"}

      # partition into model types
      model_names = items_to_save.values.map{|item|item["model"].to_sym}.uniq
      model_names.each do |model_name|
        model_handle = ModelHandle.new(ret_session_context_id(),model_name)

        # keep only the items belonging to this model type
        model_items = items_to_save.reject{|item_id,info|not info["model"].to_sym == model_name}

        # TODO: temp hack (and an expensive call that should be removed): make
        # sure items_to_save all have ws_id as parent; for now assumes ws_id is
        # just a datacenter
        parent_field = :datacenter_datacenter_id
        model_items.reject! do |item_id,info|
          idh = id_handle(item_id.to_i,info["model"].to_sym)
          if idh[:parent_model_name] == :datacenter
            false
          else
            obj_info = idh.create_object.get_objects_from_sp_hash(:columns => [parent_field]).first
            not (obj_info||{})[parent_field] == (ws_id && ws_id.to_i)
          end
        end
        #############################################


        # Build partial-update rows: only the per-workspace :ui coordinates.
        update_rows = model_items.map do |item_id,info|
          {
            :id => item_id.to_i,
            :ui => {ws_id.to_s.to_sym =>
              {:left => info["pos"]["left"].gsub(/[^0-9]+/,"").to_i,
               :top => info["pos"]["top"].gsub(/[^0-9]+/,"").to_i}
            }
          }
        end
        Model.update_from_rows(model_handle,update_rows,:partial_value=>true)

        # TODO: remove debug statement
=begin
  pp [:model_name,model_name]
  pp [:model_items,model_items]
  pp [:debug_stored_new_pos,get_objects(model_name,SQL.in(:id,model_items.map{|item|item[0].to_i}),Model::FieldSet.opt([:id,:ui],model_name))]
=end
      end
      return {}
    end
-
-
=begin
-
def index
-
# TODO: make call to load the users/system already in use plugins,cmds,etc
-
# include_js('plugins/search.cmdhandler')
-
# TODO: remove this after fully getting viewspaces going
-
# add_js_exe("R8.Workspace.setupNewItems();")
-
# add_js_exe("R8.Toolbar.init({node:'group-01',tools:['quicksearch']});")
-
include_js('plugins/search.cmdhandler')
-
-
return {
-
:content=>'',
-
:panel=>'viewspace'
-
}
-
end
-
=end
-
-
1
    # Loads the datacenter view: currently only pulls in the search command
    # handler plugin and returns empty content; the node-listing logic is
    # stubbed out below.
    def loaddatacenter(id,parsed_query_string=nil)
      # TODO: make call to load the users/system already in use plugins,cmds,etc
      include_js('plugins/search.cmdhandler')

=begin
  # TODO: make this generic to load all items for a viewspace, not just loaddatacenter
  # retrieve the nodes
  node_list = get_objects(node_name.to_sym,where_clause)

  tpl = R8Tpl::TemplateR8.new("node/wspace_list",user_context())
  tpl.set_js_tpl_name("node_wspace_list")
  tpl.assign('node_list',node_list)
=end
      return {:content=>''}
    end
-
-
# This function will be called after the workspace framework is loaded,
-
# probably as part of an action set
-
1
    # This function will be called after the workspace framework is loaded,
    # probably as part of an action set. Currently a no-op: the intended
    # toolbar-item rendering is commented out below.
    def loadtoolbar
=begin
  toolbar_items = workspace.get_toolbar_items
  layout :workspace__toolbar_items
  assign(@toolbar_items,toolbar_items)
  render 'toolbar_items'

  # build in roles/permission checks here to filter the list
=end
    end
-
-
1
    # Library search: builds prefix-regex filters from the request params that
    # match the model's common columns, restricts results to library-owned,
    # non-assembly-nested objects, decorates each result with icon path and
    # i18n title, and renders the model-specific search-list template into
    # its panel.
    def search
      params = request.params.dup
      model_name = params.delete("model_name").to_sym
      # TODO: hack to get around restriction type = template does not allow one to see assemblies
      # TODO: need to determine how to handle an assembly that is a template; may just assume everything in library is template
      # and then do away with explicitly setting type to "template"
      params.delete("type") if model_name == :component
      cols = model_class(model_name).common_columns()
      # prefix-match each recognized column against its param value
      filter_conjuncts = params.map do |name,value|
        [:regex,name.to_sym,"^#{value}"] if cols.include?(name.to_sym)
      end.compact
      # restrict results to belong to library and not nested in assembly
      filter_conjuncts += [[:neq,:library_library_id,nil],[:eq,:assembly_id,nil]]
      sp_hash = {
        :cols => cols,
        :filter => [:and] + filter_conjuncts
      }
      model_list = Model.get_objs(model_handle(model_name),sp_hash).each{|r|r.materialize!(cols)}

      i18n = get_i18n_mappings_for_models(model_name)
      model_list.each_with_index do |model,index|
        # pp model
        model_list[index][:model_name] = model_name
        body_value = ''
        model_list[index][:ui] ||= {}
        model_list[index][:ui][:images] ||= {}
        name = model_list[index][:display_name]
        title = name.nil? ? "" : i18n_string(i18n,model_name,name)

        # TODO: temporary to distinguish between chef and puppet components
        # (appends the config agent type's first letter to the title)
        if model_name == :component
          if config_agent_type = model_list[index][:config_agent_type]
            title += " (#{config_agent_type[0].chr})"
          end
        end

        # TODO: change after implementing all the new types and making generic icons for them
        model_type = 'service'
        model_sub_type = 'db'
        model_type_str = "#{model_type}-#{model_sub_type}"
        prefix = "#{R8::Config[:base_images_uri]}/#{model_name}Icons"
        png = model_list[index][:ui][:images][:tnail] || "unknown-#{model_type_str}.png"
        model_list[index][:image_path] = "#{prefix}/#{png}"

        model_list[index][:i18n] = title

=begin
  img_value = model_list[index][:ui][:images][:tnail] ?
  '<div class="img_wrapper"><img title="'+title+'"src="'+R8::Config[:base_images_uri]+'/'+model_name+'Icons/'+model_list[index][:ui][:images][:tnail]+'"/></div>' :
  ''
  body_value = img_value

  body_value == '' ? body_value = model_list[index][:display_name] : nil
  model_list[index][:body_value] = body_value
=end
      end

      # pp "^^^^^^^^^^^^^^^^^^^^^^^^^^^^"
      # pp model_list
      # pp "^^^^^^^^^^^^^^^^^^^^^^^^^^^^"

      tpl = R8Tpl::TemplateR8.new("workspace/wspace_search_#{model_name}",user_context())
      tpl.set_js_tpl_name("wspace_search_#{model_name}")
      tpl.assign('model_list',model_list)

      slide_width = 130*model_list.size
      tpl.assign('slide_width',slide_width)
      # TODO: needed to below back in so template did not barf
      # }
      _model_var = {}
      _model_var[:i18n] = get_model_i18n(model_name,user_context())
      tpl.assign("_workspace",_model_var)
      tpl.assign("model_name",model_name.to_s)

      tpl_result = tpl.render()
      tpl_result[:panel] = "#{model_name}-search-list-container"
      return tpl_result
    end
-
-
# TODO: datacenter_id=nil is stub
-
1
    # Lists the node groups of a datacenter (defaulting to /datacenter/dc1),
    # assigns default cascading :top/:left UI coordinates to groups missing
    # them, wires up the toolbar for the first group, and renders the
    # node_group workspace list into the 'viewspace' panel.
    # NOTE(review): model_list[0] will raise if the datacenter has no node
    # groups — confirm whether callers guarantee a non-empty list.
    def list_items(datacenter_id=nil)

      if datacenter_id.nil?
        datacenter_id = IDHandle[:c => ret_session_context_id(), :uri => "/datacenter/dc1", :model_name => :datacenter].get_id()
      end

      model_name = :node_group
      filter_params = {:parent_id => datacenter_id}
      search_object = ret_node_group_search_object(filter_params)

      model_list = Model.get_objects_from_search_object(search_object)
      pp model_list

      run_javascript("R8.Workspace.setupNewItems();")
      top = 100
      left = 100
      # default any missing UI coordinates, staggering each item by 100px
      model_list.each_with_index do |node_group,index|
        model_list[index][:model_name] = model_name
        model_list[index][:ui].nil? ? model_list[index][:ui] = {} : nil
        model_list[index][:ui][:top].nil? ? model_list[index][:ui][:top] = top : nil
        model_list[index][:ui][:left].nil? ? model_list[index][:ui][:left] = left : nil
        top = top+100
        left = left+100

        # add_js_exe("R8.Toolbar.init({node:'group-#{model_list[index][:id]}',tools:['quicksearch']});")
      end
      add_js_exe("R8.Toolbar.init({node:'group-#{model_list[0][:id]}',tools:['quicksearch']});")

      tpl = R8Tpl::TemplateR8.new("node_group/wspace_list",user_context())
      # tpl.set_js_tpl_name("wspace_list_ng_#{model_name}")
      tpl.assign('node_group_list',model_list)

      # TODO: temp
      tpl.assign('datacenter_name','dc1')
      # 2
      _model_var = {}
      _model_var[:i18n] = get_model_i18n(model_name,user_context())
      # NOTE(review): model_name() with parens calls the controller METHOD,
      # not the local variable set to :node_group above — confirm intended.
      tpl.assign("_#{model_name().to_s}",_model_var)
      tpl.assign("model_name",model_name)
      tpl.assign("num_nodes",10) #TODO stub
      # tpl_result = tpl.render()
      # tpl_result[:panel] = 'viewspace'
      # return tpl_result
      return {
        :content => tpl.render(),
        :panel => 'viewspace'
      }
    end
-
-
# TODO: datacenter_id=nil is stub
-
1
    # Newer, client-driven variant of list_items: pushes the datacenter as a
    # viewspace to the JS side, registers the node_group list JS template, and
    # hands the decorated node-group list to R8.Workspace.addItems instead of
    # rendering server-side HTML. Returns empty content for the 'viewspace' panel.
    # NOTE(review): an identical list_items_new is defined again later in this
    # file; in Ruby the later definition wins, so THIS definition is dead code.
    def list_items_new(datacenter_id=nil)
      if datacenter_id.nil?
        datacenter_id = IDHandle[:c => ret_session_context_id(), :uri => "/datacenter/dc1", :model_name => :datacenter].get_id()
      end

      datacenter = get_object_by_id(datacenter_id,:datacenter)
      pp datacenter
      view_space = Hash.new
      view_space = {
        :type => 'datacenter',
        :object => datacenter
      }
      v_space_obj = JSON.generate(view_space)
      run_javascript("R8.Workspace.pushViewSpace(#{v_space_obj});")

      model_name = :node_group
      filter_params = {:parent_id => datacenter_id}
      search_object = ret_node_group_search_object(filter_params)

      model_list = Model.get_objects_from_search_object(search_object)
      # pp model_list

      top = 100
      left = 100
      #--------Setup Toolbar for access each group from ACL's---------
      # add_js_exe("R8.Toolbar.init({node:'group-#{model_list[0][:id]}',tools:['quicksearch']});")
      user_has_toolbar_access = true
      user_group_tool_list = Array.new
      user_group_tool_list << 'quicksearch'
      toolbar_def = {
        :tools => user_group_tool_list
      }

      # TODO: place holder stubs for ideas on possible future behavior
      # UI::workspace.add_item(model_list[i])
      # UI::workspace.render()
      tpl = R8Tpl::TemplateR8.new("node_group/wspace_list",user_context())
      tpl.set_js_tpl_name("wspace_list_ng_#{model_name}")
      tpl_info = tpl.render()
      include_js_tpl(tpl_info[:src])

=begin
  {:template_vars=>{},
  :src=>"wspace_list_ng_node_group.js",
  :template_callback=>"wspace_list_ng_node_group"}
=end

      # default any missing UI coordinates, staggering each item by 100px
      model_list.each_with_index do |node_group,index|
        model_list[index][:model_name] = model_name
        model_list[index][:ui].nil? ? model_list[index][:ui] = {} : nil
        model_list[index][:ui][:top].nil? ? model_list[index][:ui][:top] = top : nil
        model_list[index][:ui][:left].nil? ? model_list[index][:ui][:left] = left : nil
        top = top+100
        left = left+100

        #--------Setup Item In Workspace---------
        # item_def = JSON.generate(model_list[index])
        # add_js_exe("R8.Workspace.setupItem({type:'node_group',item:#{item_def},'toolbar_def':#{toolbar_def}});")
        #----------------------------------------

        # add_js_exe("R8.Toolbar.init({node:'group-#{model_list[index][:id]}',tools:['quicksearch']});")
      end

      # TODO: decide if its possible in clean manner to manage toolbar access at item level in ad-hoc ways
      # right now single toolbar def for all items in list for each type
      #--------Add Node Group List to Workspace-----
      items = Hash.new
      # NOTE(review): ":toobar_def" looks like a typo for ":toolbar_def" —
      # check what key the JS side reads before changing it.
      items = {
        :type => 'node_group',
        :items => model_list,
        :toobar_def => toolbar_def,
        :tpl_callback => tpl_info[:template_callback]
      }
      addItemsObj = JSON.generate(items)
      run_javascript("R8.Workspace.addItems(#{addItemsObj});")
      #---------------------------------------------

      # add_js_exe("R8.Toolbar.init({node:'group-#{model_list[0][:id]}',tools:['quicksearch']});")

=begin
  tpl.assign('node_group_list',model_list)

  # TODO: temp
  tpl.assign('datacenter_name','dc1')

  _model_var = {}
  _model_var[:i18n] = get_model_i18n(model_name,user_context())
  tpl.assign("_#{model_name().to_s}",_model_var)
  tpl.assign("model_name",model_name)
  tpl.assign("num_nodes",10) #TODO stub
  # tpl_result = tpl.render()
  # tpl_result[:panel] = 'viewspace'
  # return tpl_result
=end
      return {
        :content => '',
        :panel => 'viewspace'
      }
=begin
  return {
    :content => tpl.render(),
    :panel => 'viewspace'
  }
=end
    end
-
-
# TODO: datacenter_id=nil is stub
-
1
def list_items_new(datacenter_id=nil)
  # Renders the workspace view listing a datacenter's node groups: pushes a
  # view-space object to the client, loads the node-group list, assigns
  # cascading default canvas coordinates to items lacking saved UI state,
  # and ships the list plus toolbar/template info to the JS workspace.
  #
  # datacenter_id - optional id; defaults to the id of /datacenter/dc1 (stub)
  # Returns a {:content,:panel} hash consumed by the front-end dispatcher.
  if datacenter_id.nil?
    datacenter_id = IDHandle[:c => ret_session_context_id(), :uri => "/datacenter/dc1", :model_name => :datacenter].get_id()
  end

  datacenter = get_object_by_id(datacenter_id,:datacenter)
  view_space = {
    :type => 'datacenter',
    :object => datacenter
  }
  run_javascript("R8.Workspace.pushViewSpace(#{JSON.generate(view_space)});")

  model_name = :node_group
  search_object = ret_node_group_search_object(:parent_id => datacenter_id)
  model_list = Model.get_objects_from_search_object(search_object)

  # TODO: decide if its possible in clean manner to manage toolbar access at item level in ad-hoc ways
  # right now single toolbar def for all items in list for each type
  toolbar_def = {
    :tools => ['quicksearch']
  }

  tpl = R8Tpl::TemplateR8.new("node_group/wspace_list",user_context())
  tpl.set_js_tpl_name("wspace_list_ng_#{model_name}")
  tpl_info = tpl.render()
  include_js_tpl(tpl_info[:src])

  # Cascade default positions (100,100), (200,200), ... for items without
  # saved coordinates; offsets advance for every item regardless.
  top = 100
  left = 100
  model_list.each do |node_group|
    node_group[:model_name] = model_name
    node_group[:ui] ||= {}
    node_group[:ui][:top] ||= top
    node_group[:ui][:left] ||= left
    top += 100
    left += 100
  end

  #--------Add Node Group List to Workspace-----
  items = {
    :type => 'node_group',
    :items => model_list,
    # bug fix: key was misspelled :toobar_def; the front-end reference code
    # in this file uses 'toolbar_def'
    :toolbar_def => toolbar_def,
    :tpl_callback => tpl_info[:template_callback]
  }
  run_javascript("R8.Workspace.addItems(#{JSON.generate(items)});")
  #---------------------------------------------

  return {
    :content => '',
    :panel => 'viewspace'
  }
end
-
-
=begin
-
def list_items_2(datacenter_id)
-
datacenter = id_handle(datacenter_id,:datacenter).create_object()
-
datacenter_id = datacenter.id()
-
-
include_js('plugins/search.cmdhandler')
-
view_space = {
-
:type => 'datacenter',
-
:object => datacenter
-
}
-
v_space_obj = JSON.generate(view_space)
-
run_javascript("R8.Workspace.pushViewSpace(#{v_space_obj});")
-
-
#--------Setup Toolbar for access each group from ACL's---------
-
# add_js_exe("R8.Toolbar.init({node:'group-#{model_list[0][:id]}',tools:['quicksearch']});")
-
user_has_toolbar_access = true
-
user_group_tool_list = Array.new
-
user_group_tool_list << 'quicksearch'
-
toolbar_def = {:tools => user_group_tool_list}
-
-
include_js('toolbar.quicksearch.r8')
-
-
tpl_info_hash = Hash.new
-
-
tpl = R8Tpl::TemplateR8.new("node_group/wspace_display",user_context())
-
tpl.set_js_tpl_name("ng_wspace_display")
-
tpl_info_hash[:node_group] = tpl.render()
-
include_js_tpl(tpl_info_hash[:node_group][:src])
-
-
tpl = R8Tpl::TemplateR8.new("node/wspace_display",user_context())
-
tpl.set_js_tpl_name("node_wspace_display")
-
tpl_info_hash[:node] = tpl.render()
-
include_js_tpl(tpl_info_hash[:node][:src])
-
-
##### ----------------- add in model info
-
model_list = datacenter.get_items()
-
-
items = model_list.map do |object|
-
model_name = object.model_name
-
{
-
:type => model_name.to_s,
-
:object => object,
-
:toolbar_def => toolbar_def,
-
:tpl_callback => tpl_info_hash[model_name][:template_callback],
-
:ui => object[:ui][datacenter_id.to_s.to_sym]
-
}
-
end
-
-
addItemsObj = JSON.generate(items)
-
run_javascript("R8.Workspace.addItems(#{addItemsObj});")
-
-
#---------------------------------------------
-
-
return {
-
:content => '',
-
:panel => 'viewspace'
-
}
-
end
-
=end
-
-
1
# Handles a workspace quick-search request: builds a LIKE-based where clause
# from any request params that match the model's default field set, decorates
# each result with a thumbnail/body markup value, and renders the search-list
# template into the requested (or default) panel.
def search_2
  # pp request.params
  model_name = request.params['model_name']
  field_set = Model::FieldSet.default(model_name.to_sym)
  # search_query = request.params['sq']

  # Keep only request params whose name is a column in the default field set.
  where_clause = {}
  request.params.each do |name,value|
    (field_set.include_col?(name.to_sym)) ? where_clause[name.to_sym] = value : nil;
  end

  # where_clause = {:display_name => search_query}
  # Fold the collected pairs into an AND-ed chain of prefix-LIKE conditions;
  # an empty hash folds to nil (i.e. no filtering).
  if where_clause
    where_clause = where_clause.inject(nil){|h,o|SQL.and(h,SQL::WhereCondition.like(o[0],"#{o[1]}%"))}
  end

  model_list = get_objects(model_name.to_sym,where_clause)
  model_list.each_with_index do |model,index|
    model_list[index][:model_name] = model_name
    body_value = ''
    model_list[index][:ui] ||= {}
    model_list[index][:ui][:images] ||= {}
    # Build an <img> wrapper when a thumbnail is present; the << chain appends
    # onto a fresh string literal each pass, so no stored value is mutated.
    img_value = model_list[index][:ui][:images][:tnail] ? img_value = '<div class="img_wrapper"><img title="' << model_list[index][:display_name] << '"' << 'src="' << R8::Config[:base_images_uri] << '/' << model_name << 'Icons/'<< model_list[index][:ui][:images][:tnail] << '"/></div>' : ""
    body_value = img_value

    # Fall back to the display name when there is no thumbnail markup.
    body_value == '' ? body_value = model_list[index][:display_name] : nil
    model_list[index][:body_value] = body_value
  end

  tpl = R8Tpl::TemplateR8.new("workspace/wspace_search_#{model_name}_2",user_context())
  tpl.set_js_tpl_name("wspace_search_#{model_name}_2")
  tpl.assign('model_list',model_list)

  # 170px per result item drives the slider width.
  slide_width = 170*model_list.size
  tpl.assign('slide_width',slide_width)
  slider_id_prefix = (request.params['slider_id_prefix']) ? request.params['slider_id_prefix'] : model_name
  tpl.assign('slider_id_prefix',slider_id_prefix)

  # TODO: needed to below back in so template did not barf
  # }
  _model_var = {}
  _model_var[:i18n] = get_model_i18n(model_name,user_context())
  # NOTE(review): model_name() with parens forces a *method* call, bypassing
  # the local variable of the same name — confirm a model_name helper exists.
  tpl.assign("_#{model_name().to_s}",_model_var)
  tpl.assign("model_name",model_name)

  tpl_result = tpl.render()

  tpl_result[:panel] = (request.params['panel_id']) ? request.params['panel_id'] : model_name+'-search-list-container'

  return tpl_result
end
-
-
# TODO: check if this and its referents are deprecated
-
1
def ret_node_group_search_object(filter_params)
  # Builds a SearchObject over node_groups from the given filter pairs,
  # translating the pseudo key :parent_id into the actual datacenter
  # foreign-key column name.
  mh = ModelHandle.new(ret_session_context_id(),:node_group,:datacenter)
  parent_col = mh.parent_id_field_name()
  conditions = filter_params.map do |key,value|
    [:eq, (key == :parent_id ? parent_col : key), value]
  end
  search_def = {
    "search_pattern" => {
      :relation => :node_group,
      :filter => [:and] + conditions,
      :columns => [:id, :display_name]
    }
  }
  SearchObject.create_from_input_hash(search_def,:workspace,ret_session_context_id())
end
-
-
-
# deprecate
-
1
# Commits all pending changes in a datacenter: creates a task from the
# pending change list, saves it, and defers workflow execution. The
# missing-attribute error path is currently disabled (if false) — errors are
# only pp'd. Returns {} on the alert paths, otherwise {'data'=>summary}.
def commit_changes(datacenter_id=nil)
  # context_type = request.params["context_type"]
  # TODO: either use param from context id or from what is posted
  # TODO: move to getting id of top level task
  context_id = request.params["context_id"]
  datacenter_id ||= context_id

  # nil-safe coercion: stays nil when no id was provided at all
  datacenter_id = datacenter_id && datacenter_id.to_i

  pending_changes = flat_list_pending_changes_in_datacenter(datacenter_id)
  if pending_changes.empty?
    run_javascript("R8.Workspace.showAlert('No Pending Changes to Commit');")
    return {}
  end

  top_level_task = create_task_from_pending_changes(pending_changes)

  # TODO: need to sync ValidationError with analysis done in group by
  errors = Violation.find_missing_required_attributes(top_level_task)
  # TODO: removing for time being
  # if errors
  pp [:errors,errors] if errors
  # Disabled error-reporting branch; kept for when validation is re-enabled.
  if false
    error_list = []
    # TODO: stub
    i18n = {
      "MissingRequiredAttribute"=>'is missing required Attribute'
    }
    alert_msg = "'Commit errors for missing attrs'"
    error_str = "Commit errors for missing attrs<br/>"
    errors.each { |e|
      error_name = Aux::demodulize(e.class.to_s)
      case error_name
      when "MissingRequiredAttribute"
        error_description = "Component <b>#{e[:component_name]}</b> on node <b>#{e[:node_name]}</b> "+i18n[error_name]+"#{e[:attribute_name]}"
      end
      # TODO: revisit when fully implementing notifications/feed, right now warnings on component add are different then commit errors
      e[:name] = error_name
      e[:target_node_id] = e[:node_id]
      e[:description] = error_description
      e[:type] = "error"
      error_list << e
    }
    run_javascript("R8.Workspace.showAlert(#{alert_msg});")
    error_list_json = JSON.generate(error_list)
    run_javascript("R8.Notifications.addErrors(#{error_list_json});")
    return {}
  end

  # Human-readable summary of what is being committed (returned to caller).
  test_str = "pending changes:\n"
  pending_changes.each do |sc|
    test_str << " type=#{sc[:type]}; id=#{(sc[:component]||sc[:node])[:id].to_s}; name=#{(sc[:component]||sc[:node])[:display_name]||'UNSET'}\n"
  end

  top_level_task.save!()
  workflow = Workflow.create(top_level_task)
  workflow.defer_execution()

  run_javascript("R8.Workspace.showAlert('Commit Logged,Pending Execution');")
  return {
    'data'=>test_str
  }
end
-
-
# TODO: doing redundant work to what is done in commit_ide
-
1
# IDE variant of commit_changes, keyed by a target id rather than a
# datacenter: first persists any attribute values posted with the form, then
# creates, saves, and defers a task for the target's pending state changes.
# TODO: doing redundant work to what is done in commit_ide
def commit_changes_ide(target_id)
  target_id = target_id.gsub(/editor-target-/,"") #TODO: temp to compensate front end error

  target_idh = id_handle(target_id,:target)
  hash = request.params.dup
  # NOTE(review): commit_date/commit_msg are extracted but never used below.
  commit_date = hash.delete("commit_date")
  commit_msg = hash.delete("commit_msg")

  # save any params given; blank strings are normalized to nil
  attr_val_hash = hash
  attr_val_hash.each{|k,v|attr_val_hash[k] = nil if v.kind_of?(String) and v.empty?}
  # TODO: if not using c_ prefix remove from view and remove below
  attr_val_hash = attr_val_hash.inject({}) do |h,(k,v)|
    h.merge(k.gsub(/^c__[0-9]+__/,"") => v)
  end
  attribute_rows = AttributeComplexType.ravel_raw_post_hash(attr_val_hash,:attribute)
  Attribute.update_and_propagate_attributes(target_idh.createMH(:attribute),attribute_rows)
  ######
  pending_changes = StateChange.flat_list_pending_changes(target_idh)
  if pending_changes.empty?
    run_javascript("R8.IDE.showAlert('No Pending Changes to Commit');")
    return {}
  end

  top_level_task = Task.create_from_pending_changes(target_idh,pending_changes)

  # TODO: need to sync Violation with analysis done in group by
  # TODO: just need to check if anything returned missing values
  errors = Violation.find_missing_required_attributes(top_level_task)
  # TODO: removing for time being
  # if errors
  # Disabled error-reporting branch; kept for when validation is re-enabled.
  if false #errors
    pp [:errors,errors]
    error_list = []
    # TODO: stub
    i18n = {
      "MissingRequiredAttribute"=>'is missing required Attribute'
    }
    alert_msg = "'Commit errors for missing attrs'"
    error_str = "Commit errors for missing attrs<br/>"
    errors.each { |e|
      error_name = Aux::demodulize(e.class.to_s)
      case error_name
      when "MissingRequiredAttribute"
        error_description = "Component <b>#{e[:component_name]}</b> on node <b>#{e[:node_name]}</b> "+i18n[error_name]+"#{e[:attribute_name]}"
      end
      # TODO: revisit when fully implementing notifications/feed, right now warnings on component add are different then commit errors
      e[:name] = error_name
      e[:target_node_id] = e[:node_id]
      e[:description] = error_description
      e[:type] = "error"
      error_list << e
    }
    run_javascript("R8.IDE.showAlert(#{alert_msg});")
    error_list_json = JSON.generate(error_list)
    # run_javascript("R8.Notifications.addErrors(#{error_list_json});")
    return {}
  end

  # Human-readable summary of what is being committed (returned to caller).
  test_str = "pending changes:\n"
  pending_changes.each do |sc|
    test_str << " type=#{sc[:type]}; id=#{(sc[:component]||sc[:node])[:id].to_s}; name=#{(sc[:component]||sc[:node])[:display_name]||'UNSET'}\n"
  end

  top_level_task.save!()

  workflow = Workflow.create(top_level_task)
  workflow.defer_execution()

  run_javascript("R8.IDE.showAlert('Commit Logged,Pending Execution');")
  return {
    'data'=>test_str
  }
end
-
-
1
# Renders the commit dialog for a datacenter: builds (then deletes) a
# transient commit task to produce the rendered change tree, assigns
# environment-specific form content, and boots the CommitTool on the client.
def commit(datacenter_id=nil)
  commit_tree = Hash.new
  if datacenter_id
    # TODO: Move pending changes retrieve inside of create_commit_task
    pending_changes = flat_list_pending_changes_in_datacenter(datacenter_id.to_i)
    unless pending_changes.empty?
      # TODO: cleanup interface to tasks/pending changes
      # commit_task = create_commit_task()
      # default gets all pending changes since last commit by user

      # group can be either datacenter, node_group, node,
      # later figure out how to
      # commit_task = create_commit_task(group_id)
      commit_task = create_task_from_pending_changes(pending_changes)
      commit_task.save!()
=begin
POSSIBLE CHANGES TO HASH
-task_id to id


=end


      # default if nothing passed is json, make extensible for xml formatting for future possible integrations
      # commit_tree = top_level_task.render_commit_tree()
      # commit_tree = top_level_task.render_commit_tree('xml | json')
      commit_tree = commit_task.render_form()
      add_i18n_strings_to_rendered_tasks!(commit_tree)
      # pp [:commit_tree,commit_tree]
      # Task was only needed to render the tree; discard it.
      delete_instance(commit_task.id())
    end
  end

  # tpl = R8Tpl::TemplateR8.new("workspace/commit_test",user_context())
  # panel_id = request.params['panel_id']

  tpl = R8Tpl::TemplateR8.new("workspace/commit",user_context())
  tpl.assign(:_app,app_common())

  # TODO: using datacenters as environments right now, redo later on
  # NOTE(review): datacenter_id may be nil (or a raw string) here — confirm
  # get_object_by_id tolerates that.
  dc_hash = get_object_by_id(datacenter_id,:datacenter)
  if dc_hash[:type] == 'production'
    # Production commits are scheduled into a maintenance window.
    commit_content = '<tr><td class="label">Maintenance Window</td></tr>
<tr><td class="field"><select id="commit_date" name="commit_date">
<option value="foo">Prime Window - Tue 10pm</option>
<option value="foo">Weekly Window - Fri 8pm</option>
<option value="foo">Weekend Warrior - Saturday 8pm</option>
</select></td></tr>'
    submit_label = "Schedule Changes"
  else
    commit_content = ""
    submit_label = "Commit"
  end
  tpl.assign(:commit_content,commit_content)
  tpl.assign(:submit_label,submit_label)

  panel_id = request.params['panel_id']

  include_js('plugins/commit.tool')
  include_js('external/jquery.treeview')
  include_css('jquery.treeview')
  # include_js('plugins/user.component')
  # run_javascript('setTimeout(initUserForm,500);')
  commit_tree_json = JSON.generate(commit_tree)

  run_javascript("R8.CommitTool.init();")
  run_javascript("R8.CommitTool.renderTree(#{commit_tree_json},'edit','change-list-tab-content');")

  return {
    :content=> tpl.render(),
    :panel=>panel_id
  }
end
-
-
1
# IDE variant of commit: renders the commit dialog for a target, including
# the list of required-but-unset attributes gathered from a transient commit
# task (created only to render the tree, then deleted).
def commit_ide(target_id=nil)
  commit_tree = Hash.new
  required_attr_list = Array.new
  if target_id
    target_idh = id_handle(target_id,:target)
    pending_changes = StateChange.flat_list_pending_changes(target_idh)
    unless pending_changes.empty?
      commit_task = Task.create_from_pending_changes(target_idh,pending_changes)
      commit_task.save!()

      # handle missing required attrs
      augmented_attr_list = Attribute.augmented_attribute_list_from_task(commit_task)

      opts = {:types_to_keep => [:required]}
      grouped_attrs = Attribute.ret_grouped_attributes!(augmented_attr_list,opts)

      # Build display entries of the form node/component/attribute.
      i18n_mapping = get_i18n_mappings_for_models(:attribute,:component)
      required_attr_list = grouped_attrs.map do |a|
        name = a[:display_name]
        attr_i18n = i18n_string(i18n_mapping,:attribute,name)
        component_i18n = i18n_string(i18n_mapping,:component,a[:component][:display_name])
        node_i18n = a[:node][:display_name]
        qualified_attr_i18n = "#{node_i18n}/#{component_i18n}/#{attr_i18n}"
        {
          :id => a[:unraveled_attribute_id],
          :name => name,
          :value => a[:attribute_value],
          :i18n => qualified_attr_i18n
        }
      end

      # default if nothing passed is json, make extensible for xml formatting for future possible integrations
      # commit_tree = top_level_task.render_commit_tree()
      # commit_tree = top_level_task.render_commit_tree('xml | json')
      commit_tree = commit_task.render_form()
      add_i18n_strings_to_rendered_tasks!(commit_tree)
      delete_instance(commit_task.id())
    end
  end

  # tpl = R8Tpl::TemplateR8.new("workspace/commit_test",user_context())
  # panel_id = request.params['panel_id']

  tpl = R8Tpl::TemplateR8.new("workspace/commit_ide",user_context())
  tpl.assign(:_app,app_common())

  if required_attr_list.length == 0
    tpl.assign(:no_required_attrs,'<tr><td class="label"><i>No Required Attributes Missing</i></td></tr>')
  else
    tpl.assign(:no_required_attrs,'<tr><td class="label"><i></i></td></tr>') #TODO: to get around template bug
  end
  tpl.assign(:required_attr_list,required_attr_list)

  # TODO: using datacenters as environments right now, redo later on
  # NOTE(review): looks up the *target* id as a datacenter — confirm intended.
  dc_hash = get_object_by_id(target_id,:datacenter)
  if dc_hash[:type] == 'production'
    # Production commits are scheduled into a maintenance window.
    commit_content = '<tr><td class="label">Maintenance Window</td></tr>
<tr><td class="field"><select id="commit_date" name="commit_date">
<option value="foo">Prime Window - Tue 10pm</option>
<option value="foo">Weekly Window - Fri 8pm</option>
<option value="foo">Weekend Warrior - Saturday 8pm</option>
</select></td></tr>'
    submit_label = "Schedule Changes"
  else
    commit_content = ""
    submit_label = "Commit"
  end
  tpl.assign(:commit_content,commit_content)
  tpl.assign(:submit_label,submit_label)

  panel_id = request.params['panel_id']

  include_js('plugins/commit.tool3')
  # include_js('plugins/commit.tool')
  include_js('external/jquery.treeview')
  include_css('jquery.treeview')
  # include_js('plugins/user.component')
  # run_javascript('setTimeout(initUserForm,500);')
  commit_tree_json = JSON.generate(commit_tree)

  run_javascript("R8.CommitTool3.init();")
  # run_javascript("R8.CommitTool3.renderTree(#{commit_tree_json},'edit','change-list-tab-content');")

  return {
    :content=> tpl.render(),
    :panel=>panel_id
  }
end
-
-
# TODO: just for testing this gets a datacenter id
-
1
def commit_test(datacenter_id=nil)
  # Debug/testing action: when a datacenter id is given, builds a throwaway
  # task from its pending changes, pp's the rendered form, and deletes the
  # task again; always renders the commit_test template into the requested
  # panel and boots the CommitTool.
  if datacenter_id
    changes = flat_list_pending_changes_in_datacenter(datacenter_id.to_i)
    unless changes.empty?
      task = create_task_from_pending_changes(changes)
      task.save!()
      pp [:rendered_tasks, task.render_form()]
      delete_instance(task.id())
    end
  end

  tpl = R8Tpl::TemplateR8.new("workspace/commit_test",user_context())
  panel_id = request.params['panel_id']

  include_js('plugins/commit.tool')
  include_js('external/jquery.treeview')
  include_css('jquery.treeview')
  # include_js('plugins/user.component')
  # run_javascript('setTimeout(initUserForm,500);')
  run_javascript('R8.CommitTool.init();')

  {
    :content => tpl.render(),
    :panel => panel_id
  }
end
-
-
1
def create_assembly()
  # Renders the "create assembly" dialog: lists all libraries (giving
  # unnamed ones a positional fallback label) and boots the assembly tool.
  tpl = R8Tpl::TemplateR8.new("workspace/create_assembly",user_context())
  tpl.assign(:_app,app_common())
  panel_id = request.params['panel_id']

  # No filter criteria yet; an empty hash folds to a nil where clause.
  filter = {}
  if filter
    filter = filter.inject(nil){|acc,(col,val)| SQL.and(acc,SQL::WhereCondition.like(col,"#{val}%")) }
  end
  library_list = get_objects(:library,filter)

  # Positional fallback names ("Library 1", "Library 2", ...); the counter
  # advances for every library, named or not, matching original behavior.
  library_list.each_with_index do |library,idx|
    library[:name] = "Library #{idx + 1}" if library[:name].nil?
  end
  tpl.assign(:library_list,library_list)

  include_js('plugins/assembly.tool')
  # include_js('external/jquery.treeview')
  # include_css('jquery.treeview')

  run_javascript('R8.AssemblyTool.init();')

  {
    :content => tpl.render(),
    :panel => panel_id
  }
end
-
-
1
def create_assembly_ide()
  # IDE variant of create_assembly: renders the create-assembly dialog with
  # both library and service-module choices and boots the v2 assembly tool.
  # Returns a {:content,:panel} hash for the front-end dispatcher.
  tpl = R8Tpl::TemplateR8.new("workspace/create_assembly",user_context())
  tpl.assign(:_app,app_common())
  panel_id = request.params['panel_id']

  # No filter criteria yet; an empty hash folds to a nil where clause.
  where_clause = {}
  if where_clause
    where_clause = where_clause.inject(nil){|h,o|SQL.and(h,SQL::WhereCondition.like(o[0],"#{o[1]}%"))}
  end
  library_list = get_objects(:library,where_clause)
  # Label each library from its display name (removed dead lib_num counter,
  # which was incremented but never read).
  library_list.each do |library|
    library[:name] = "Library #{(library[:display_name]||"").capitalize}"
  end
  tpl.assign(:library_list,library_list)

  project = get_default_project()
  opts = Opts.new(:project_idh => project.id_handle())
  service_list = ServiceModule.list(opts).map do |r|
    {
      :id => r[:id],
      # version suffix interpolates to "" when version is nil
      :name => "#{r[:display_name]}#{r[:version] && "-#{r[:version]}"}"
    }
  end
  tpl.assign(:service_list,service_list)

  include_js('plugins/assembly.tool2')
  # include_js('external/jquery.treeview')
  # include_css('jquery.treeview')

  run_javascript('R8.AssemblyTool2.init();')

  return {
    :content=> tpl.render(),
    :panel=>panel_id
  }
end
-
-
1
# Clones a set of workspace items into a new library assembly component.
# NOTE(review): the clone step itself has been removed — this action creates
# the assembly row, then raises unconditionally; everything after the raise
# (including the return) is unreachable.
def clone_assembly(explicit_hash=nil)
  hash = explicit_hash || request.params
  # TODO: stub
  icon_info = {"images" => {"display" => "generic-assembly.png","tiny" => "","tnail" => "generic-assembly.png"}}

  library_id = hash["library_id"].to_i
  library_idh = id_handle(library_id,:library)
  name = hash["name"] || "assembly"
  create_row = {
    :library_library_id => library_id,
    :ref => name,
    :display_name => name,
    :ui => icon_info,
    :type => "composite"
  }
  assembly_mh = library_idh.createMH(:model_name=>:component,:parent_model_name=>:library)
  assembly_idh = Model.create_from_row(assembly_mh,create_row,:convert=>true)

  # TODO: getting json rather than hash
  item_list = JSON.parse(hash["item_list"])
  node_idhs = item_list.map{|item|id_handle(item["id"].to_i,item["model"].to_sym)}
  connected_links,dangling_links = Node.get_external_connected_links(node_idhs)
  # TODO: raise error to user if dangling link
  Log.error("dangling links #{dangling_links.inspect}") unless dangling_links.empty?
  link_idhs = connected_links.map{|link|link.id_handle}

  id_handles = node_idhs + link_idhs
  library_object = library_idh.create_object()
  # TODO: encapsulate some of above so ca just call library_object.clone_into(...
  raise Error.new("library_object.clone_into_library_assembly is removed")
  #library_object.clone_into_library_assembly(assembly_idh,id_handles)
  return {:content => nil}
end
-
-
1
# Not implemented: raises unconditionally. The commented block below is the
# previous implementation, kept as reference for the planned rewrite.
def clone_assembly_ide(explicit_hash=nil)
  raise Error.new("Not implemented")
=begin
TODO: rewrite using refactored Assembly::Template.create_or_update_from_instance
# TODO: temp hack
assembly_name, service_id = ret_non_null_request_params(:name,:service_id)
item_list = JSON.parse(ret_non_null_request_params(:item_list))
icon_info = {"images" => {"display" => "generic-assembly.png","tiny" => "","tnail" => "generic-assembly.png"}}

service = id_handle(service_id,:service_module).create_object.update_object!(:display_name,:library_library_id)
service_module_name = service[:display_name]

# TODO remove DEMOHACK
node_idhs = item_list.map do |item|
  id = item["id"].to_i
  model = (item["model"].nil? or item["model"].empty?) ? :node : item["model"].to_sym
  id_handle(id,model)
end
project = get_default_project()
Assembly::Template.create_or_update_from_instance(project,node_idhs,assembly_name,service_module_name,icon_info)
return {:content => nil}
=end
end
-
end
-
end
-
-
-
-
1
module Ramaze::Helper
-
1
module AssemblyHelper
-
1
r8_nested_require('assembly_helper','action')
-
1
include ActionMixin
-
-
1
def ret_assembly_object()
  # Builds the assembly object named by request params, typed according to
  # the :subtype param (instance vs template).
  assembly_id,subtype = ret_assembly_params_id_and_subtype()
  model = (subtype == :instance) ? :assembly_instance : :assembly_template
  id_handle(assembly_id,:component).create_object(:model_name => model)
end
-
1
def ret_assembly_params_object_and_subtype()
  # Like ret_assembly_object, but also returns the resolved subtype.
  assembly_id,subtype = ret_assembly_params_id_and_subtype()
  model = (subtype == :instance) ? :assembly_instance : :assembly_template
  assembly_obj = id_handle(assembly_id,:component).create_object(:model_name => model)
  [assembly_obj,subtype]
end
-
-
1
def ret_workspace_object?(id_param=nil,opts={})
  # Resolves the assembly instance named by id_param, insisting it is a
  # workspace. Fix: forward the caller's opts (previously silently dropped)
  # while still forcing :only_workspace.
  ret_assembly_instance_or_workspace_object?(id_param,opts.merge(:only_workspace=>true))
end
-
1
def ret_assembly_instance_or_workspace_object?(id_param=nil,opts={})
  # Returns the assembly instance named by id_param, retyped as an
  # assembly_workspace when it is the special workspace instance.
  # Raises ErrorUsage when opts[:only_workspace] is set and it is not one.
  assembly_instance = ret_assembly_instance_object(id_param)
  if ::DTK::Workspace.is_workspace?(assembly_instance)
    assembly_instance.id_handle().create_object(:model_name => :assembly_workspace)
  else
    if opts[:only_workspace]
      # fixed typo in user-facing message: 'ony' -> 'only'
      raise ::DTK::ErrorUsage.new("The command can only be applied to a workspace")
    end
    assembly_instance
  end
end
-
-
1
def ret_assembly_instance_object?(id_param=nil)
  # Nil-safe variant: returns nil when the request param is absent/unresolved.
  param_name = id_param || :assembly_id
  instance_id = ret_request_param_id?(param_name,::DTK::Assembly::Instance)
  return nil unless instance_id
  id_handle(instance_id,:component).create_object(:model_name => :assembly_instance)
end
-
1
def ret_assembly_instance_object(id_param=nil)
  # Strict variant: ret_request_param_id is responsible for rejecting a
  # missing/invalid param.
  param_name = id_param || :assembly_id
  instance_id = ret_request_param_id(param_name,::DTK::Assembly::Instance)
  id_handle(instance_id,:component).create_object(:model_name => :assembly_instance)
end
-
1
def ret_assembly_template_object(id_param=nil)
  # Template counterpart of ret_assembly_instance_object.
  param_name = id_param || :assembly_id
  template_id = ret_request_param_id(param_name,::DTK::Assembly::Template)
  id_handle(template_id,:component).create_object(:model_name => :assembly_template)
end
-
-
1
def ret_assembly_params_id_and_subtype()
  # Reads :subtype (defaulting to :instance) and resolves :assembly_id
  # against the matching assembly model class.
  subtype = (ret_request_params(:subtype)||:instance).to_sym
  klass = (subtype == :instance) ? ::DTK::Assembly::Instance : ::DTK::Assembly::Template
  [ret_request_param_id(:assembly_id,klass),subtype]
end
-
-
1
def ret_assembly_subtype()
  # The :subtype request param as a symbol; defaults to :instance.
  subtype = ret_request_params(:subtype)
  (subtype || :instance).to_sym
end
-
-
1
def ret_port_object(param,assembly_idh,conn_type)
  # Builds a Port object scoped to the given assembly and connection type.
  create_obj(param,::DTK::Port,:assembly_idh => assembly_idh,:connection_type => conn_type)
end
-
-
1
# Resolves a request param to a DTK::Component id within the given lookup
# context (e.g. :assembly_id); delegates to ret_request_param_id, which
# handles missing/invalid params.
def ret_component_id(param,context={})
  ret_request_param_id(param,::DTK::Component,context)
end
-
1
def ret_component_id?(param,context={})
  # Nil-safe wrapper: only resolves when the request param is present;
  # returns nil otherwise.
  return nil unless ret_request_params(param)
  ret_component_id(param,context)
end
-
1
def ret_component_id_handle(param,context={})
  # Component id wrapped in an id handle.
  component_id = ret_component_id(param,context)
  id_handle(component_id,:component)
end
-
-
1
def ret_node_id(node_name_param,assembly)
  # Node id resolved within the given assembly.
  idh = ret_node_id_handle(node_name_param,assembly)
  idh.get_id()
end
-
1
# Resolves a node request param to an id handle, using the assembly's id as
# the resolution context.
def ret_node_id_handle(node_name_param,assembly)
  ret_request_param_id_handle(node_name_param,::DTK::Node,assembly.id())
end
-
-
1
def ret_node_or_group_member_id_handle(node_name_param,assembly)
  # Resolves the :node_id request param — a numeric id or a node display
  # name — to a node id handle within the assembly.
  # Raises ErrorNameAmbiguous when the name matches multiple nodes and
  # ErrorNameDoesNotExist when it matches none.
  node_name_or_id = ret_non_null_request_params(:node_id)
  if node_name_or_id =~ /^[0-9]+$/
    ret_request_param_id_handle(node_name_param,::DTK::Node,assembly.id())
  else
    nodes = assembly.info_about(:nodes)
    matching_nodes = nodes.select{|node| node[:display_name].eql?(node_name_or_id)}

    matching_id =
      if matching_nodes.size == 1
        matching_nodes.first[:id]
      elsif matching_nodes.size > 1
        # bug fix: was 'size > 2', which mis-reported exactly two matches
        # as 'name does not exist' instead of 'ambiguous'
        raise ::DTK::ErrorNameAmbiguous.new(node_name_or_id,matching_nodes.map{|r|r[:id]},:node)
      else
        raise ::DTK::ErrorNameDoesNotExist.new(node_name_or_id,:node)
      end

    id_handle(matching_id,:node)
  end
end
-
-
##
# Pass a param name whose value is a comma-separated list of node names or
# ids. Returns an array of node id handles scoped to the assembly; returns
# [] when the param is absent.
#
def ret_node_id_handles(node_name_param, assembly)
  Log.error("check if works for node groups")
  raw = ret_request_params(node_name_param)
  return [] unless raw
  # split on commas and strip surrounding whitespace from each token
  raw.split(',').map do |token|
    ret_id_handle_from_value(token.strip, ::DTK::Node, assembly.id())
  end
end
-
-
# assuming that service link is identified by either
-
#:service_link_id or
-
#:service_type and :input_component_id or
-
#:dependency_type, :input_component_id, and :output_component_id
-
1
def ret_port_link(assembly=nil)
  # Identifies a service (port) link either directly by :service_link_id or
  # by a filter built from :input_component_id plus optional
  # :dependency_name/:service_type and :output_component_id params.
  assembly ||= ret_assembly_instance_object()
  if ret_request_params(:service_link_id)
    return create_obj(:service_link_id,::DTK::PortLink,:assembly_idh => assembly.id_handle())
  end

  filter = {:input_component_id => ret_component_id(:input_component_id, :assembly_id => assembly.id())}
  service_type = ret_request_params(:dependency_name) || ret_request_params(:service_type)
  filter[:service_type] = service_type if service_type
  if ret_request_params(:output_component_id)
    filter[:output_component_id] = ret_component_id(:output_component_id, :assembly_id => assembly.id())
  end
  assembly.get_matching_port_link(filter)
end
-
-
# validates param_settings and returns array of setting objects
-
# order determines order it is applied
-
1
def ret_settings_objects(assembly_template)
  # Validates the :settings_json_form request param against the template's
  # defined settings and returns the matching setting objects in the order
  # given (order determines the order they are applied).
  result = ::DTK::ServiceSetting::Array.new()
  settings_json = ret_request_params(:settings_json_form)
  return result unless settings_json
  requested = ::DTK::Aux.json_parse(settings_json)

  # existing settings indexed by display_name
  by_name = assembly_template.get_settings().inject(Hash.new) do |h,s|
    h.merge(s[:display_name] => s)
  end

  unknown = Array.new
  requested.each do |req|
    name = req['name']
    unless name
      raise ::DTK::ErrorUsage.new("Ill-formed service settings string")
    end
    setting = by_name[name]
    if setting
      params = req['parameters']
      setting.bind_parameters!(params) if params
      result << setting
    else
      unknown << name
    end
  end
  unless unknown.empty?
    raise ::DTK::ErrorUsage.new("Provided service settings (#{unknown.join(',')}) are not defined; legal settings are: #{by_name.keys.join(',')}")
  end
  result
end
-
-
# returns [assembly_template_name,service_module_name,module_namespace]; if one or more of these cannot be found, nil is returned in the associated element
-
1
def get_template_and_service_names_params(assembly)
  # Returns [assembly_template_name, service_module_name, module_namespace];
  # names missing from request params are backfilled from the assembly's
  # parent template; unresolved elements stay nil.
  template_name,module_name = ret_request_params(:assembly_template_name,:service_module_name)
  namespace = nil
  # either both should be present or neither; 'or' used for robustness
  if template_name.nil? or module_name.nil?
    parent_template = assembly.get_parent()
    if parent_template
      template_name = parent_template[:display_name]
      service_module = parent_template.get_service_module()
      if service_module
        module_name = service_module[:display_name]
        service_module.update_object!(:namespace)
        namespace = service_module[:namespace][:name]
      end
    end
  end
  [template_name,module_name,namespace]
end
-
end
-
-
1
def ret_attribute_settings_hash()
  # Parses the :settings_yaml_content request param (YAML) and flattens it
  # via process_attributes! into the attribute-settings form used downstream.
  yaml_content = ret_non_null_request_params(:settings_yaml_content)
  parsed = ::DTK::Aux.convert_to_hash(yaml_content,:yaml)
  process_attributes!(parsed)
  raise parsed if parsed.kind_of?(::DTK::Error)
  parsed
end
-
-
1
# Normalizes a parsed settings hash in place: flattens per-node component
# 'attributes' sub-hashes and hoists assembly-wide components ('components')
# and node entries ('nodes') to the top level. Returns response.
def process_attributes!(response)
  # assembly-wide components are assigned to the reserved assembly-wide node
  response['assembly_wide/'] = response.delete('components') if response.has_key?('components')

  nodes = response.delete('nodes') || {}
  nodes.each_pair do |node_name, node_content|
    cmps = node_content.delete('components') || {}
    cmps.each_pair do |cmp_name, cmp_content|
      cmps[cmp_name] = cmp_content.delete('attributes') || {}
    end
    # node-level attributes are merged over the flattened components
    nodes[node_name] = cmps.merge!(node_content.delete('attributes') || {})
  end

  assembly_wide = response.delete('assembly_wide/') || {}
  assembly_wide.each_pair do |cmp_name, cmp_content|
    assembly_wide[cmp_name] = cmp_content.delete('attributes') || {}
  end

  response.merge!(nodes)
  response.merge!(assembly_wide)
  response
end
-
-
1
# Placeholder for filter-related info; intentionally a no-op (returns nil).
def info_about_filter()
  # TODO: implement or remove
end
-
-
# checks element through set of fields
-
1
# Walks path_array through element and compares the leaf against the integer
# form of element_id_val. A nil/empty element_id_val matches anything; a nil
# element (or a nil step along the path) matches nothing.
def element_matches?(element, path_array, element_id_val)
  return true if element_id_val.nil? || element_id_val.empty?
  return false if element.nil?
  leaf = path_array.reduce(element) do |current, field|
    step = current[field]
    return false if step.nil?
    step
  end
  leaf == element_id_val.to_i
end
-
-
-
end
-
-
1
module Ramaze::Helper
  module AssemblyHelper
    ##
    # helpers related to assembly command and control actions
    module ActionMixin
      # Creates a queue object and initiates an action that pushes its results
      # on the queue; usage errors are pushed on the queue instead of raised.
      def initiate_action(action_queue_class, assembly, params={}, node_pattern={})
        InitiateAction.block(action_queue_class, params) do |action_queue|
          nodes = ret_matching_nodes(assembly, node_pattern)
          action_queue.initiate(nodes, params)
        end
      end

      # Same as initiate_action, but with an explicit node list; an optional
      # block is invoked just before initiating.
      def initiate_action_with_nodes(action_queue_class, nodes, params={}, &block)
        InitiateAction.block(action_queue_class, params) do |action_queue|
          block.call if block
          action_queue.initiate(nodes, params)
        end
      end

      # Initiates a test-execution action; on usage errors the queue is
      # returned without results.
      def initiate_execute_tests(action_queue_class, params={})
        InitiateAction.execute_tests_block(action_queue_class, params) do |action_queue|
          action_queue.initiate
        end
      end

      module InitiateAction
        # Runs the block with a freshly created action queue; DTK usage errors
        # are converted into an :error entry on the queue.
        def self.block(action_queue_class, params, &block)
          opts = ::DTK::Aux.hash_subset(params, :agent_action)
          action_queue = action_queue_class.new(opts)
          begin
            block.call(action_queue)
          rescue ::DTK::ErrorUsage => e
            action_queue.push(:error, e.message)
          end
          action_queue
        end

        # Like .block, but usage errors are swallowed and the queue returned as-is.
        def self.execute_tests_block(action_queue_class, params, &block)
          action_queue = action_queue_class.new(params)
          begin
            block.call(action_queue)
          rescue ::DTK::ErrorUsage
            return action_queue
          end
          action_queue
        end
      end

      # Returns the assembly's leaf nodes matching node_pattern (a one-entry
      # hash keyed by :node_name, :node_id or :node_identifier); assembly-wide
      # pseudo nodes are always excluded. Raises DTK::ErrorUsage for an
      # unrecognized pattern form.
      def ret_matching_nodes(assembly, node_pattern_x = {})
        # removing any empty or nil filters
        node_pattern = (node_pattern_x ? node_pattern_x.reject { |k, v| v.nil? || v.empty? } : {})

        # TODO: can handle more efficiently than getting all nodes and filtering
        nodes = assembly.get_leaf_nodes()
        nodes.delete_if { |node| node[:type].eql?('assembly_wide') }

        if node_pattern.empty?
          nodes
        else
          ret =
            if node_pattern.is_a?(Hash) && node_pattern.size == 1
              case node_pattern.keys.first
              when :node_name
                node_name = node_pattern.values.first
                MatchingNodes.filter_by_name(nodes, node_name)
              when :node_id
                node_id = node_pattern.values.first
                MatchingNodes.filter_by_id(nodes, node_id)
              when :node_identifier
                node_identifier = node_pattern.values.first
                if node_identifier =~ /^[0-9]+$/
                  MatchingNodes.filter_by_id(nodes, node_identifier)
                else
                  MatchingNodes.filter_by_name(nodes, node_identifier)
                end
              end
            end
          ret || raise(::DTK::ErrorUsage.new("Unexpected form of node_pattern"))
        end
      end

      module MatchingNodes
        # Returns all nodes whose id starts with node_id (prefix match).
        # FIX: the original tested the Array returned by #select for
        # truthiness; #select never returns nil, so the "no match" error
        # could never fire and an empty result was silently returned.
        def self.filter_by_id(nodes, node_id)
          node_id = node_id.to_i
          matches = nodes.select { |n| n.id.to_s.start_with?(node_id.to_s) }
          raise ::DTK::ErrorUsage.new("No node matches id (#{node_id})") if matches.empty?
          matches
        end

        # Returns all nodes whose print-form name starts with node_name.
        # Same empty-result fix as filter_by_id.
        def self.filter_by_name(nodes, node_name)
          matches = nodes.select { |n| n.assembly_node_print_form().start_with?(node_name) }
          raise ::DTK::ErrorUsage.new("No node matches name (#{node_name})") if matches.empty?
          matches
        end
      end
    end
  end
end
-
1
module Ramaze::Helper
  module BundleAndReturnHelper
    # Base class for accumulated controller results.
    class ControllerResults < Hash
    end

    # REST flavor: wraps a single result hash.
    class ControllerResultsRest < ControllerResults
      def initialize(result)
        replace(result)
      end
    end

    # Web flavor: results keyed by action namespace, plus the order in which
    # they were added (kept under :as_run_list).
    class ControllerResultsWeb < ControllerResults
      def initialize()
        super
        replace(:as_run_list => Array.new)
      end

      def add(action_namespace, ctrl_result)
        self[action_namespace] = ctrl_result
        self[:as_run_list] << action_namespace
      end
    end

    # Queues a stylesheet for inclusion in the rendered page.
    def include_css(css_name)
      @css_includes << "#{R8::Config[:base_css_uri]}/#{css_name}.css"
    end

    # TODO: augment with priority param when necessary
    # Queues a javascript file for inclusion.
    def include_js(js_name)
      @js_includes << "#{R8::Config[:base_js_uri]}/#{js_name}.js"
    end

    # Queues a cached javascript template for inclusion.
    def include_js_tpl(js_tpl_name)
      @js_includes << "#{R8::Config[:base_js_uri]}/cache/#{js_tpl_name}"
    end

    # Queues raw javascript to be executed on the page.
    def add_js_exe(js_content)
      @js_exe_list << js_content
    end

    # Same effect as add_js_exe.
    def run_javascript(js_content)
      @js_exe_list << js_content
    end

    # Returns the queued js includes and resets the queue.
    def ret_js_includes()
      drained = @js_includes
      @js_includes = Array.new
      drained
    end

    # Returns the queued css includes and resets the queue.
    def ret_css_includes()
      drained = @css_includes
      @css_includes = Array.new
      drained
    end

    # Returns the queued js snippets and resets the queue.
    def ret_js_exe_list()
      drained = @js_exe_list
      @js_exe_list = Array.new
      drained
    end
    ######
  end
end
-
# TODO: needs cleanup including around mechanism to get object associated with ids
-
1
module Ramaze::Helper
-
1
module Common
-
1
include XYZ #TODO: included because of ModelHandle and Model
-
-
1
# Creates a model object for id. model_name_or_class may be nil (use this
# controller's model), a Symbol model name, or a model class.
def create_object_from_id(id, model_name_or_class=nil, opts={})
  model_name =
    case model_name_or_class
    when nil then model_name()
    when Symbol then model_name_or_class
    else # it is a model class
      ret_module_name_from_class(model_name_or_class)
    end
  id_handle(id, model_name).create_object(opts.merge(:controller_class => self.class))
end
-
-
1
# Returns the Ramaze user object; in development, when the helper yields a
# class-less value, falls back to the configured test user.
def user_object()
  ret = user
  # NOTE(review): `ret.class == nil` is never true for plain objects
  # (nil.class is NilClass); it appears to rely on the user helper returning
  # a proxy that delegates #class. Preserved as-is -- confirm before changing.
  if ret.class == nil
    if R8::Config[:development_test_user]
      c = ret_session_context_id()
      ret = @test_user ||= XYZ::User.get_user(ModelHandle.new(c, :user), R8::Config[:development_test_user])
    end
  end
  ret
end
-
-
1
# Default namespace for the tenant: the unix user the server process runs as.
# Deliberately not the username -- we use the tenant unique name instead.
def default_namespace()
  ::DTK::Common::Aux.running_process_user()
end

# Builds a ModelHandle for the given model name on behalf of the current user.
def model_handle(model_name_x = model_name())
  ModelHandle.create_from_user(user_object(), model_name_x)
end

# Like model_handle, but scoped to the user's private group.
def model_handle_with_private_group(model_name_x = model_name())
  user_obj = user_object()
  handle = ModelHandle.create_from_user(user_obj, model_name_x)
  group_obj = UserGroup.get_private_group(handle.createMH(:user_group), user_obj[:username])
  handle.merge(:group_id => group_obj[:id])
end
-
-
# looks for default if no target is given
-
1
# looks for default if no target is given
# Returns the target instance from the :target_id request param, or the
# default target when the param is absent.
def create_target_instance_with_default(target_id_field = :target_id, model_class = nil)
  if ret_request_params(target_id_field)
    create_obj(target_id_field, ::DTK::Target::Instance)
  else
    Target::Instance.get_default_target(model_handle(:target))
  end
end

# Returns an id handle for target_id, or for the default target when nil.
# Raises DTK::ErrorUsage when no default target has been set.
def target_idh_with_default(target_id = nil)
  return id_handle(target_id, :target) if target_id
  default_target = Target::Instance.get_default_target(model_handle(:target))
  unless default_target
    raise DTK::ErrorUsage.new("If an explicit target is not given (with option '-t TARGET'), this command uses the default target, but a default target has not been set")
  end
  default_target.id_handle()
end
-
-
1
# Returns the single project for this tenant; raises when there is none or
# more than one (multiple projects not implemented yet).
def get_default_project()
  projects = ::DTK::Project.get_all(model_handle(:project))
  case projects.size
  when 0 then raise DTK::Error.new("Cannot find any projects")
  when 1 then projects.first
  else raise DTK::Error.new("Not implemented yet: case when multiple projects")
  end
end
-
1
private
-
-
# helpers that interact with model
-
1
# Fetches model objects matching where_clause; :target is remapped to
# :datacenter (TODO: remove temp datacenter->target).
def get_objects(model_name, where_clause = {}, opts = {})
  model_name = :datacenter if model_name == :target
  model_class(model_name).get_objects(model_handle(model_name), where_clause, opts)
end

# Fetches a single object by id.
def get_object_by_id(id, model_name_x = model_name())
  get_objects(model_name_x, {:id => id}).first
end

# Applies hash assignments (column refs converted to keys) to the object.
def update_from_hash(id, hash, opts = {})
  idh = id_handle(id, model_name, hash["display_name"])
  model_class(model_name).update_from_hash_assignments(idh, Aux.col_refs_to_keys(hash), opts)
end

# Creates a child object under parent_id_handle; returns its new id (or nil).
def create_from_hash(parent_id_handle, hash)
  new_id = model_class(model_name).create_from_hash(parent_id_handle, hash).map { |x| x[:id] }.first
  Log.info("created new object with id #{new_id}") if new_id
  new_id
end
-
-
1
# Deletes the instance with the given id for this controller's model.
def delete_instance(id)
  c = ret_session_context_id()
  Model.delete_instance(IDHandle[:c => c, :id => id, :model_name => model_name()])
end

# Builds an IDHandle for id under i_model_name, propagating the created
# object's group_id when present.
def id_handle(id, i_model_name = model_name(), display_name = nil)
  # FIX: removed dead line `model_name = :datacenter if model_name == :target`:
  # the assignment made `model_name` a fresh local that is nil when the
  # condition is evaluated (so the remap could never fire), and the local was
  # never read afterwards -- the code below uses i_model_name.
  c = ret_session_context_id()
  hash = {:c => c, :guid => id.to_i, :model_name => i_model_name.to_sym}
  hash.merge!(:display_name => display_name) if display_name
  idh = IDHandle.new(hash, {:set_parent_model_name => true})
  obj = idh.create_object().update_object!(:group_id)
  idh.merge!(:group_id => obj[:group_id]) if obj[:group_id]
  idh
end
-
-
1
# IDHandle for the top-level factory of this controller's model.
def top_level_factory_id_handle()
  IDHandle[:c => ret_session_context_id(), :uri => "/#{model_name()}", :is_factory => true]
end

# IDHandle for the root; optionally scoped to opts[:group_id].
def top_id_handle(opts = {})
  idh = IDHandle[:c => ret_session_context_id(), :uri => "/"]
  idh.merge!(:group_id => opts[:group_id]) if opts[:group_id]
  idh
end

# Resolves a uri to its object id.
def ret_id_from_uri(uri)
  IDHandle[:c => ret_session_context_id(), :uri => uri].get_id()
end
-
-
# request parsing fns
-
# TODO: may deprecate; before doing so would have to remove calls from some list views in display
-
1
# Builds a where clause from the request hash, excluding :parent_id;
# returns nil when no request hash is available.
def ret_where_clause(field_set = Model::FieldSet.all_real(model_name()))
  hash = ret_hash_for_where_clause()
  return nil unless hash
  field_set.ret_where_clause_for_search_string(hash.reject { |k, v| k == :parent_id })
end

# Returns the :parent_id from the request, if any.
def ret_parent_id()
  (ret_hash_for_where_clause() || {})[:parent_id]
end

# Returns the order-by list ({:field, :order} pairs) from the saved search,
# or nil when no ordering was requested.
def ret_order_by_list()
  # TODO: handle case when this is a get
  # TODO: filter fields to make sure real fields or treat virtual columns
  saved_search = ret_saved_search_in_request()
  order_by = (saved_search || {})["order_by"]
  return nil unless order_by
  order_by.map { |o| {:field => o["field"].to_sym, :order => o["order"]} }
end
-
-
# TODO: just for testing
-
1
# TODO: just for testing
TestOveride = 100 # nil
LimitDefault = 20
NumModelItemsDefault = 10000

# Computes {:start, :limit, :num_model_items} paging info from the saved
# search. While TestOveride is set, the limit is forced to that value.
def ret_paging_info()
  # TODO: case on request_method_is_post?()
  # TODO: might be that the query is optimized by not including start == 0
  saved_search = ret_saved_search_in_request()
  # TODO: just for testing
  if TestOveride and (saved_search || {})["start"].nil?
    return {:start => 0, :limit => TestOveride, :num_model_items => NumModelItemsDefault}
  end
  return nil unless saved_search
  return nil unless saved_search["start"] or saved_search["limit"]
  start = (saved_search["start"] || 0).to_i
  limit = (saved_search["limit"] || R8::Config[:page_limit] || LimitDefault).to_i
  # TODO: just for testing
  limit = TestOveride if TestOveride
  num_model_items = saved_search["num_model_items"] || NumModelItemsDefault
  {:start => start, :limit => limit, :num_model_items => num_model_items}
end

# Seeds a search-model hash with request values ('' when absent) for each
# column in field_set.
def ret_model_for_list_search(field_set)
  request_params = ret_request_params() || {}
  field_set.cols.inject({}) { |acc, field| acc.merge(field => request_params[field] || '') }
end

# Parses the "search" request param and returns its converted filter, if any.
def ret_request_params_filter()
  search = convert_search_item_from_json((ret_request_params() || {})["search"])
  search && check_and_convert_filter_form(search["filter"])
end

# Parses the "saved_search" request param (JSON) into a hash.
def ret_saved_search_in_request()
  convert_search_item_from_json((ret_request_params() || {})["saved_search"])
end

# JSON-decodes item unless it is nil/empty (returns nil in that case).
def convert_search_item_from_json(item)
  JSON.parse(item) unless item.nil? or item.empty?
end
-
-
1
# Converts filter to canonical form, raising ErrorUsage on unsupported forms.
def check_and_convert_filter_form(filter)
  converted = convert_filter_form(filter)
  raise ErrorUsage.new("Filter having form (#{filter.inspect}) not treated") if converted.nil?
  converted
end

# Supports only [:eq, field, numeric-id]; returns [:eq, :field, Integer],
# or nil for any other form.
def convert_filter_form(filter)
  return nil unless filter.kind_of?(Array) and filter.size == 3
  op, field, value = filter
  if op.to_sym == :eq and value.to_s =~ /^[0-9]+$/
    [op.to_sym, field.to_sym, value.to_i]
  end
end

# Source of the where-clause hash: parsed query string for GET requests,
# request params otherwise.
def ret_hash_for_where_clause()
  request_method_is_get?() ? ret_parsed_query_string_when_get() : ret_request_params()
end

# For GET requests, prefers the action-set-supplied @parsed_query_string over
# the explicit query string, when both are present.
def ret_parsed_query_string_when_get()
  explicit_qs = ret_parsed_query_string_from_uri()
  return @parsed_query_string if explicit_qs.nil? or explicit_qs.empty?
  return explicit_qs if @parsed_query_string.nil? or @parsed_query_string.empty?
  @parsed_query_string
end
-
-
-
# TODO needs refinement
-
1
# TODO needs refinement
# Parses the raw query string into a hash of symbol keys; "true"/"false"
# become booleans, everything else (including numeric strings) stays a String.
def ret_parsed_query_string_from_uri()
  parsed = Hash.new
  query_string = ret_query_string()
  return parsed unless query_string
  # TBD: not yet looking for errors in the query string
  query_string.scan(%r{([/A-Za-z0-9_]+)=([/A-Za-z0-9_]+)}) do |key, value|
    parsed[key.to_sym] =
      case value
      when "true" then true
      when "false" then false
      else value # numeric strings deliberately left as strings for now
      end
  end # TBD: not complete; for example not for decimals
  parsed
end

# Raw query string from the rack env.
def ret_query_string()
  request.env["QUERY_STRING"]
end
-
-
# TODO: these three methods below need some cleanup
-
# param refers to key that can have id or name value
-
1
# TODO: these three methods below need some cleanup
# param refers to a request key that may hold an id or a name
def create_obj(param, model_class = nil, extra_context = nil)
  create_object_from_id(ret_request_param_id(param, model_class, extra_context), model_class)
end

# Resolves the request param to an id and wraps it in an id handle.
def ret_request_param_id_handle(param, model_class = nil, version = nil)
  resolved_id = ret_request_param_id(param, model_class, version)
  id_handle(resolved_id, ret_module_name_from_class(model_class))
end

# Resolves an explicit id-or-name value to an id handle.
def ret_id_handle_from_value(id_or_name_value, model_class = nil, extra_context = nil)
  resolved_id = resolve_id_from_name_or_id(id_or_name_value, model_class, extra_context)
  id_handle(resolved_id, ret_module_name_from_class(model_class))
end
# TODO: One part of cleanup is to have name_to_id and check_valid return the object with
# keys :id and :group_id; we can put in an option flag for this, but need to check we
# cover all instances of these. Make this a separate function called by create_obj and
# then have ret_request_param_id_handle and ret_request_param_id call id and id_handle
# methods on it, which avoids needing to call create_object_from_id in create_obj.

# Returns the resolved id when the request param is present, else nil.
def ret_request_param_id?(param, model_class = nil, extra_context = nil)
  id_or_name = ret_request_params(param)
  resolve_id_from_name_or_id(id_or_name, model_class, extra_context) if id_or_name
end

# Returns the resolved id; raises when the request param is missing.
def ret_request_param_id(param, model_class = nil, extra_context = nil)
  resolve_id_from_name_or_id(ret_non_null_request_params(param), model_class, extra_context)
end
-
-
1
# Resolves an id-or-name value to a validated id: numeric values go through
# model_class.check_valid_id, names through model_class.name_to_id.
# FIX: test kind_of?(Integer) instead of Fixnum -- Fixnum was removed in
# Ruby 3.2, and since Fixnum < Integer this is backward compatible.
def resolve_id_from_name_or_id(id_or_name, model_class = nil, extra_context = nil)
  model_name = ret_module_name_from_class(model_class)
  model_class ||= model_class(model_name)
  model_handle = model_handle(model_name)

  if id_or_name.kind_of?(Integer) or id_or_name =~ /^[0-9]+$/
    id = id_or_name.to_i
    params = [model_handle, id]
    params << extra_context if extra_context
    model_class.check_valid_id(*params)
  else
    params = [model_handle, id_or_name]
    params << extra_context if extra_context
    model_class.name_to_id(*params)
  end
end
-
-
# resolve name/id but in this case given request param is not required
-
1
# resolve name/id but in this case the request param is not required
def ret_request_param_id_optional(param, model_class = nil, extra_context = nil)
  ret_request_param_id(param, model_class, extra_context) if ret_request_params(param)
end

# Maps a model class to its module-name symbol; falls back to this
# controller's model_name when no class is given.
def ret_module_name_from_class(model_class = nil)
  return model_name() unless model_class
  ::DTK::Model::SubclassProcessing.model_name(model_class) || Aux.underscore(Aux.demodulize(model_class.to_s)).to_sym
end
private :ret_module_name_from_class

# Returns request params for POST requests: the whole params hash when called
# with no arguments, a single value for one key, or an array of values.
def ret_request_params(*params)
  return nil unless request_method_is_post?()
  return request.params if params.empty?
  values = params.map { |p| request.params[p.to_s] }
  values.size == 1 ? values.first : values
end
-
-
1
# Like ret_request_params, but blank values are normalized to nil.
# FIX: forward params with a splat -- the original passed the params Array as
# a single argument, so the wrong request key was looked up -- and guard
# against nil values before calling #empty?.
def ret_request_params_force_nil(*params)
  raw = ret_request_params(*params)
  normalized = [*raw].collect { |v| (v.nil? || v.empty?) ? nil : v }
  normalized.size <= 1 ? normalized.first : normalized
end
-
-
1
# Boolean form of a single request param.
def ret_request_param_boolean(param)
  boolean_form(ret_request_params(param))
end

# Params hash with every value symbolized.
def ret_symbol_params_hash(*params)
  ret_params_hash(*params).inject(Hash.new) { |h, (k, v)| h.merge(k => v.to_s.to_sym) }
end

# Params hash with every value coerced to a boolean.
def ret_boolean_params_hash(*params)
  ret_params_hash(*params).inject(Hash.new) { |h, (k, v)| h.merge(k => boolean_form(v)) }
end

# true only for TrueClass or the literal string "true".
def boolean_form(v)
  v.kind_of?(TrueClass) or (v.kind_of?(String) and v == "true")
end
private :boolean_form

# Returns the values for params, raising (via raise_error_null_params?) when
# any are missing; nil for non-POST requests.
def ret_non_null_request_params(*params)
  return nil unless request_method_is_post?()
  null_params = Array.new
  values = params.map do |p|
    val = request.params[p.to_s]
    null_params << p unless val
    val
  end
  raise_error_null_params?(*null_params)
  values.size == 1 ? values.first : values
end
-
-
1
# Hash of present POST params; missing keys are omitted.
def ret_params_hash(*params)
  return Hash.new unless request_method_is_post?() and params.size > 0
  params.inject(Hash.new) do |h, p|
    val = request.params[p.to_s]
    val ? h.merge(p.to_sym => val) : h
  end
end

# Same as ret_params_hash, but empty-string values are treated as missing.
# FIX: guard the #empty? call -- the original called val.empty? on nil, so a
# missing param raised NoMethodError instead of being skipped.
def ret_params_hash_with_nil(*params)
  return Hash.new unless request_method_is_post?() and params.size > 0
  params.inject(Hash.new) do |h, p|
    val = request.params[p.to_s]
    val = nil if val and val.empty?
    val ? h.merge(p.to_sym => val) : h
  end
end
-
-
1
# Builds [{:pattern, :value}, ...] from either an av_pairs_hash param or a
# single pattern/value pair; raises DTK::ErrorUsage when neither is supplied.
def ret_params_av_pairs()
  pattern, value, av_pairs_hash = ret_request_params(:pattern, :value, :av_pairs_hash)
  if av_pairs_hash
    av_pairs_hash.map { |k, v| {:pattern => k, :value => v} }
  elsif pattern
    [{:pattern => pattern, :value => value}]
  else
    raise ::DTK::ErrorUsage.new("Missing parameters")
  end
end
-
-
1
# Resolves a node-binding ruleset from the request (or an explicit
# identifier); returns nil when no identifier is supplied.
def node_binding_ruleset?(node_template_identifier_param, node_binding_identifier = nil)
  node_binding_identifier ||= ret_request_params(node_template_identifier_param)
  return nil unless node_binding_identifier
  node_binding_rs_id = NodeBindingRuleset.name_to_id(model_handle(:node_binding_ruleset), node_binding_identifier)
  unless node_binding_rs_id
    raise ::DTK::ErrorUsage.new("Illegal node template indentifier (#{node_binding_identifier })")
  end
  create_object_from_id(node_binding_rs_id, :node_binding_ruleset)
end

# Returns the component template for the request param; raises if a
# component title was also supplied.
def ret_component_template(param, opts = {})
  component_template, component_title = ret_component_template_and_title(param, opts)
  raise ::DTK::ErrorUsage.new("Component title should not be given") if component_title
  component_template
end

# returns [component_template, component_title] where component_title could be nil
def ret_component_template_and_title(param, opts = {})
  version = opts[:versions] || opts[:version]
  template_idh = ret_request_param_id_handle(param, ::DTK::Component::Template, version)
  template = template_idh.create_object(:model_name => :component_template)
  title = ::DTK::ComponentTitle.parse_title?(ret_non_null_request_params(param))
  [template, title]
end

# Variant that first tries the assembly module context and then the
# component module context.
def ret_component_template_and_title_for_assembly(param, assembly)
  opts = {:versions => [::DTK::ModuleVersion.ret(assembly), nil]}
  ret_component_template_and_title(param, opts)
end

# Parses an optional title out of a component name.
def ret_component_title?(component_name)
  ::DTK::ComponentTitle.parse_title?(component_name)
end
-
-
1
# Raises ErrorUsage when any required post parameters were missing; no-op on
# an empty list.
# FIX: the plural message was missing its closing parenthesis.
def raise_error_null_params?(*null_params)
  return if null_params.empty?
  error_msg =
    if null_params.size == 1
      "Rest post parameter (#{null_params.first}) is missing"
    else
      "Rest post parameters (#{null_params.join(',')}) are missing"
    end
  raise ErrorUsage.new(error_msg)
end

def request_method_is_get?()
  request.env["REQUEST_METHOD"] == "GET"
end

def request_method_is_post?()
  request.env["REQUEST_METHOD"] == "POST"
end
-
-
# R8 functions
-
1
# Seeds a list template with default pagination/search values and the common
# app URIs, then applies the default column ordering.
def set_template_defaults_for_list!(tpl)
  {
    :list_start_prev => 0,
    :list_start_next => 0,
    :search_context => nil,
    :search_content => nil,
    :_app => app_common()
  }.each { |key, val| tpl.assign(key, val) }
  set_template_order_columns!(tpl)
end
-
-
1
# Assigns prev/next paging offsets on the template; zeros when no paging
# info is given, nil for :list_start_next past the end of the collection.
# FIX: check nil before empty? -- the original evaluated
# `paging_info.empty? or paging_info.nil?`, which raises NoMethodError when
# paging_info is nil.
def set_template_paging_info!(tpl, paging_info)
  if paging_info.nil? or paging_info.empty?
    tpl.assign(:list_start_prev, 0)
    tpl.assign(:list_start_next, 0)
    return nil
  end
  start = paging_info[:start]
  limit = paging_info[:limit]
  num_model_items = paging_info[:num_model_items]
  start_prev = ((start - limit) < 0) ? 0 : (start - limit)
  tpl.assign(:list_start_prev, start_prev)
  start_next = ((start + limit) > num_model_items) ? nil : (start + limit)
  tpl.assign(:list_start_next, start_next)
end
-
-
1
# Assigns <field>_order and <field>_order_class template vars for each
# column in field_set, based on the requested ordering.
# FIX: `ort_class = ''` was a typo for `sort_class = ''`, so fields without
# an explicit ordering were assigned nil instead of '' for their css class.
def set_template_order_columns!(tpl, order_by_list = nil, field_set = Model::FieldSet.default(model_name()))
  # TODO: should default field set be default or all real
  order_by_hash = (order_by_list || []).inject({}) { |h, o| h.merge(o[:field] => o[:order]) }
  field_set.cols.each do |field|
    sort_order = 'ASC'
    sort_class = ''
    if order_by_hash[field]
      sort_order = 'DESC' if order_by_hash[field] == 'ASC'
      sort_class = (order_by_hash[field] == 'ASC') ? 'asc' : 'desc'
    end
    tpl.assign((field.to_s + '_order').to_sym, sort_order)
    tpl.assign((field.to_s + '_order_class').to_sym, sort_class)
  end
end
-
-
-
1
# Common app-level URIs exposed to templates.
def app_common()
  [:base_uri, :base_css_uri, :base_js_uri, :base_images_uri, :avatar_base_uri].inject({}) do |h, key|
    h.merge(key => R8::Config[key])
  end
end
-
-
# aux fns
-
1
# Model class for the given model name.
def model_class(model_name_x = model_name())
  Model.model_class(model_name_x)
end

# Default action: named after the calling controller method.
def default_action_name
  this_parent_method.to_sym
end

# Controller's model name, derived from the controller class name and cached;
# subtype names are normalized via ConvertFromSubtypeModelName.
def model_name
  @model_name ||=
    begin
      raw = Aux.demodulize(self.class.to_s).gsub(/Controller$/, "").downcase.to_sym
      ConvertFromSubtypeModelName[raw] || raw
    end
end

# TODO: unify with model/subclass_processing
ConvertFromSubtypeModelName = {
  :assembly => :component,
  :node_group => :node
}
-
end
-
end
-
-
1
module Ramaze::Helper
-
1
module GeneralProcessing
-
1
def ret_session_context_id()
  # stub: fixed session context id
  2
end

### user processing
# Forces authentication before an action runs.
def login_first
  auth_violation_response() unless logged_in?
end

# 403 (rest) or login redirect (web).
def auth_violation_response()
  rest_request? ? respond('Forbidden', 403) : redirect(R8::Config[:login][:path])
end

# 401 with message (rest) or login redirect (web).
def auth_unauthorized_response(message)
  rest_request? ? respond(message, 401) : redirect(R8::Config[:login][:path])
end

# 403 with message (rest) or login redirect (web).
def auth_forbidden_response(message)
  rest_request? ? respond(message, 403) : redirect(R8::Config[:login][:path])
end

# Wraps the session user hash in a User object (cached); nil when the user
# helper does not return a Hash.
def get_user()
  return nil unless user.kind_of?(Hash)
  @cached_user_obj ||= User.new(user, ret_session_context_id(), :user)
end

# Lightweight view of the controller handed to code that should not hold the
# controller itself.
class UserContext
  attr_reader :current_profile, :request, :json_response

  def initialize(controller)
    @current_profile = :default
    @request = controller.request
    @json_response = controller.json_response?
    @controller = controller
  end

  # Delegates object construction back to the controller.
  def create_object_from_id(id)
    @controller.create_object_from_id(id)
  end
end
##############
-
##############
-
-
1
# Controller construction: resets all per-request caches and accumulators.
def initialize
  super
  @cached_user_obj = nil # memoized user wrapper built by get_user
  # TODO: see where these are used; remove if not used
  @public_js_root = R8::Config[:public_js_root]
  @public_css_root = R8::Config[:public_css_root]
  @public_images_root = R8::Config[:public_images_root]

  # TBD: may make a calls fn that declares a cached var to be 'self documenting'
  @model_name = nil # cached; computed on demand by model_name()

  # used when action set calls actions
  @parsed_query_string = nil

  # page-asset accumulators drained by the ret_*_includes/ret_js_exe_list helpers
  @css_includes = Array.new
  @js_includes = Array.new
  @js_exe_list = Array.new

  @user_context = nil

  @layout = nil

  # if there is an action set then call by value is used to substitute in child actions;
  # this var will be set to have av pairs set from global params given in action set call
  @action_set_param_map = Hash.new

  @ctrl_results = nil
end
-
-
1
# True when the response should be JSON (rest or ajax request); memoized.
# FIX: the original `@json_response ||= rest_request?() or ajax_request?()`
# parsed as `(@json_response ||= rest_request?()) or ajax_request?()` because
# `or` binds looser than assignment, so an ajax-only request was never
# memoized and both checks re-ran on every call.
def json_response?()
  @json_response ||= (rest_request?() || ajax_request?())
end

def rest_request?()
  # TODO: needs to be fixed up; issue is different envs (linux versus windows) give different values for request.env["REQUEST_URI"]
  @rest_request ||= (request.env["REQUEST_URI"] =~ Regexp.new("/rest/") ? true : nil)
end

def ajax_request?
  @ajax_request ||= ajax_request_aux?()
end

# Heuristics: '.json' route suffix, iframe-upload flag, or XMLHttpRequest header.
def ajax_request_aux?()
  route_pieces = request.env["PATH_INFO"].split("/")
  last_piece = route_pieces[route_pieces.size - 1]
  return true if /\.json/.match(last_piece)

  return true if request.params["iframe_upload"] == "1"

  return (request.env["HTTP_X_REQUESTED_WITH"] && request.env["HTTP_X_REQUESTED_WITH"] == "XMLHttpRequest")
end
-
end
-
end
-
# TODO: Temp until move to model
-
1
module Ramaze::Helper
  # Exposes the shared I18n string-mapping utilities to controllers.
  module I18nStringMapping
    include R8Tpl::Utility::I18n
  end
end
-
1
module Ramaze::Helper
-
1
module ModuleHelper
-
1
include ::DTK
-
1
# Streams an async REST response: sends the 200 headers immediately via the
# rack async.callback, runs the caller's block on a deferred thread that
# carries the current user session, then completes the deferrable body.
# NOTE(review): relies on EventMachine and a thin-style async rack env;
# `throw :async` signals the server that the response finishes later -- TODO
# confirm the hosting server supports this protocol.
def rest_async_response
  body = DeferrableBody.new

  # Get the headers out there asap, let the client know we're alive...
  EM.next_tick do
    request.env['async.callback'].call [200, {'Content-Type' => 'text/plain'}, body]
  end

  # run the caller's work off the event loop, preserving the Ramaze session
  user_object = CurrentSession.new.user_object()
  CreateThread.defer_with_session(user_object, Ramaze::Current::session) do
    yield(body)
    body.succeed
  end

  throw :async
end
-
-
1
# Collects the remote (dtkn) info linked to module_obj for the requested
# action, resolving the remote namespace from the request or defaults.
def get_remote_module_info_helper(module_obj)
  remote_module_name = module_obj.get_field?(:display_name)
  namespace = module_obj.get_field?(:namespace)

  version = ret_version()
  remote_namespace =
    ret_request_params(:remote_namespace) ||
    get_existing_default_namespace?(module_obj, version) ||
    ret_request_params(:local_namespace)
  remote_params = remote_params_dtkn(module_obj.module_type(), remote_namespace, remote_module_name, version)

  access_rights = ret_access_rights()
  rsa_pub_key, action = ret_non_null_request_params(:rsa_pub_key, :action)
  project = get_default_project()
  module_ref_content = ret_request_params(:module_ref_content)

  module_obj.get_linked_remote_module_info(project, action, remote_params, rsa_pub_key, access_rights, module_ref_content)
end
-
-
1
# Returns missing/required module dependencies plus warnings for the remote
# module identified by remote_params.
def get_service_dependencies(module_type, remote_params, client_rsa_pub_key = nil)
  project = get_default_project()
  missing, required, warnings = module_class(module_type).get_required_and_missing_modules(project, remote_params, client_rsa_pub_key)
  { :missing_modules => missing, :required_modules => required, :dependency_warnings => warnings }
end
-
-
1
# Changes permissions on the remote (repoman) copy of the module.
def chmod_from_remote_helper()
  component_module = create_obj(:module_id)
  permission_selector, remote_namespace, chmod_action = ret_request_params(:permission_selector, :remote_module_namespace, :chmod_action)
  client_rsa_pub_key = ret_non_null_request_params(:rsa_pub_key)

  remote_namespace = check_remote_namespace(remote_namespace, component_module)
  Repo::Remote.new().repoman_client().chmod(module_type(component_module), component_module.display_name, remote_namespace, permission_selector, chmod_action, client_rsa_pub_key)
end

# Confirms a make-public action on the remote module.
def confirm_make_public_helper()
  component_module = create_obj(:module_id)
  module_info, remote_namespace, public_action = ret_request_params(:module_info, :remote_module_namespace, :public_action)
  client_rsa_pub_key = ret_non_null_request_params(:rsa_pub_key)

  # namespace is validated for its side effects even though the client call does not take it
  remote_namespace = check_remote_namespace(remote_namespace, component_module)
  Repo::Remote.new().repoman_client().confirm_make_public(module_type(component_module), module_info, public_action, client_rsa_pub_key)
end

# Changes ownership of the remote module.
def chown_from_remote_helper()
  component_module = create_obj(:module_id)
  remote_namespace = ret_request_params(:remote_module_namespace)
  client_rsa_pub_key, remote_user = ret_non_null_request_params(:rsa_pub_key, :remote_user)

  remote_namespace = check_remote_namespace(remote_namespace, component_module)
  Repo::Remote.new().repoman_client().chown(module_type(component_module), component_module.display_name, remote_namespace, remote_user, client_rsa_pub_key)
end

# Adds/removes collaborators on the remote module.
def collaboration_from_remote_helper
  component_module = create_obj(:module_id)
  users, groups, remote_namespace = ret_request_params(:users, :groups, :remote_module_namespace)
  action, client_rsa_pub_key = ret_non_null_request_params(:action, :rsa_pub_key)

  remote_namespace = check_remote_namespace(remote_namespace, component_module)
  Repo::Remote.new().repoman_client().collaboration(module_type(component_module), action, component_module.display_name, remote_namespace, users, groups, client_rsa_pub_key)
end

# Lists collaborators on the remote module.
def list_collaboration_from_remote_helper
  component_module = create_obj(:module_id)
  remote_namespace = ret_request_params(:remote_module_namespace)
  client_rsa_pub_key = ret_non_null_request_params(:rsa_pub_key)

  remote_namespace = check_remote_namespace(remote_namespace, component_module)
  Repo::Remote.new().repoman_client().list_collaboration(module_type(component_module), component_module.display_name, remote_namespace, client_rsa_pub_key)
end
-
-
1
def pull_from_remote_helper(module_class)
-
# TODO: need to clean this up; right now not called because of code on server; not to clean up term for :remote_repo
-
Log.error("Not expecting to call pull_from_remote_helper")
-
local_module_name, remote_repo = ret_non_null_request_params(:module_name, :remote_repo)
-
version = ret_request_params(:version)
-
project = get_default_project()
-
-
module_class.pull_from_remote(project, local_module_name, remote_repo, version)
-
end
-
-
1
def install_from_dtkn_helper(module_type)
-
remote_namespace,remote_module_name,version = Repo::Remote::split_qualified_name(ret_non_null_request_params(:remote_module_name))
-
remote_params = remote_params_dtkn(module_type,remote_namespace,remote_module_name,version)
-
-
local_namespace = remote_params.namespace
-
local_module_name = ret_request_params(:local_module_name) || remote_params.module_name
-
project = get_default_project()
-
dtk_client_pub_key = ret_request_params(:rsa_pub_key)
-
-
do_not_raise = (ret_request_params(:do_not_raise) ? ret_request_params(:do_not_raise) : false)
-
ignore_component_error = (ret_request_params(:ignore_component_error) ? ret_request_params(:ignore_component_error) : false)
-
additional_message = (ret_request_params(:additional_message) ? ret_request_params(:additional_message) : false)
-
local_params = local_params(module_type,local_module_name,:namespace => local_namespace,:version => version)
-
-
dependency_warnings = []
-
-
# check for missing module dependencies
-
if !do_not_raise
-
missing_modules, required_modules, dependency_warnings = module_class(module_type).get_required_and_missing_modules(project, remote_params, dtk_client_pub_key)
-
# return missing modules if any
-
return { :missing_module_components => missing_modules, :dependency_warnings => dependency_warnings, :required_modules => required_modules } unless (missing_modules.empty? && required_modules.empty?)
-
end
-
-
opts = {:do_not_raise=>do_not_raise, :additional_message=>additional_message, :ignore_component_error=>ignore_component_error}
-
response = module_class(module_type).install(project,local_params,remote_params,dtk_client_pub_key,opts)
-
return response if response[:does_not_exist]
-
-
response.merge( { :namespace => remote_namespace, :dependency_warnings => dependency_warnings } )
-
end
-
-
1
# Publishes module_obj to the dtkn catalog under the :remote_component_name
# request param (defaulting to the module's own display name). Requires the
# client's :rsa_pub_key request param.
def publish_to_dtkn_helper(module_obj)
  client_rsa_pub_key = ret_non_null_request_params(:rsa_pub_key)
  qualified_remote_name = ret_request_params(:remote_component_name)

  module_obj.update_object!(:display_name,:namespace)
  opts = {:namespace => module_obj[:namespace][:display_name]}
  # fall back to the module's display name when no remote name was supplied
  qualified_remote_name = module_obj[:display_name] if qualified_remote_name.to_s.empty?

  namespace, remote_module_name,version = Repo::Remote.split_qualified_name(qualified_remote_name,opts)
  local_module_name = module_obj.module_name()

  # [Amar & Haris] this is temp restriction until rest of logic is properly fixed
  if local_module_name != remote_module_name
    raise ErrorUsage.new("Publish with remote module name (#{remote_module_name}) not equal to local module name (#{local_module_name}) is currently not supported.")
  end

  module_type = module_obj.module_type
  remote_params = remote_params_dtkn(module_type,namespace,remote_module_name,version)
  # note: namespace is deliberately re-read from the module itself for the
  # local-side params (may differ from the remote namespace above)
  namespace = module_obj.module_namespace()
  local_params = local_params(module_type,local_module_name,:namespace => namespace,:version => version)
  module_obj.publish(local_params,remote_params,client_rsa_pub_key)
end
-
-
# Builds server-side local-module location params.
# opts can have :version and :namespace (namespace defaults to the
# project's default local namespace).
def local_params(module_type, module_name, opts = {})
  ModuleBranch::Location::LocalParams::Server.new(
    :module_type => module_type,
    :module_name => module_name,
    :version     => opts[:version],
    :namespace   => opts[:namespace] || default_local_namespace_name()
  )
end
-
-
1
# Builds dtkn-catalog remote location params; a nil namespace falls back to
# the system default namespace name.
def remote_params_dtkn(module_type, namespace, module_name, version = nil)
  effective_namespace = namespace || Namespace.default_namespace_name()
  ModuleBranch::Location::RemoteParams::DTKNCatalog.new(
    :module_type      => module_type,
    :module_name      => module_name,
    :version          => version,
    :namespace        => effective_namespace,
    :remote_repo_base => ret_remote_repo_base()
  )
end
-
-
1
# Display name of the default namespace of the default project
# (nil when the field is unset).
def default_local_namespace_name()
  project = get_default_project()
  default_ns = ::DTK::Namespace::default_namespace(project.model_handle(:namespace))
  default_ns.get_field?(:display_name)
end
-
-
# this looks at connected remote repos to make an assessment; default_namespace() above is static
# if this is used; it is inserted by controller method
# Returns the :repo_namespace of the module's default linked remote repo
# (optionally filtered by version), or nil when no default repo is linked.
def get_existing_default_namespace?(module_obj,version=nil)
  linked_remote_repos = module_obj.get_linked_remote_repos(:filter => {:version => version})
  default_remote_repo = RepoRemote.ret_default_remote_repo(linked_remote_repos)
  if default_remote_repo
    Log.info("Found default namespace (#{default_remote_repo[:display_name]})")
    default_remote_repo[:repo_namespace]
  end
end
-
-
1
# Filters object_list down to entries in the :module_namespace request param.
# Local modules carry a :namespace object and are compared on its display
# name; remote entries encode the namespace in display_name as "<ns>/<module>".
# Returns object_list unchanged when no namespace param is given.
def filter_by_namespace(object_list)
  module_namespace = ret_request_params(:module_namespace)
  return object_list if module_namespace.nil? || module_namespace.strip.empty?

  object_list.select do |el|
    if el[:namespace]
      # these are local modules and have namespace object
      module_namespace.eql?(el[:namespace][:display_name])
    else
      # fix: escape the user-supplied namespace so regex metacharacters in it
      # cannot break the pattern or broaden the match
      el[:display_name].match(/#{Regexp.escape(module_namespace)}\//)
    end
  end
end
-
-
# returns [namespace,module_name] using pf_full_name param and module or namespace request params if they are given
def ret_namespace_and_module_name_for_puppet_forge(pf_full_name)
  param_module_name = ret_request_params(:module_name)
  pf_namespace,pf_module_name = ::DTK::PuppetForge.puppet_forge_namespace_and_module_name(pf_full_name)
  # an explicitly supplied :module_name must agree with the forge module name
  if param_module_name and param_module_name != pf_module_name
    raise ErrorUsage.new("Install with module name (#{param_module_name}) not equal to puppet forge module name (#{pf_module_name}) is currently not supported.")
  end
  # default is to use namespace associated with puppet forge
  [ret_request_param_module_namespace?()||pf_namespace, pf_module_name]
end
-
-
1
# Returns an id handle for the assembly-template denoted by the request
# params, raising when the params do not denote a template.
def ret_assembly_template_idh()
  template_id, subtype = ret_assembly_params_id_and_subtype()
  raise ::DTK::Error.new("Unexpected that subtype has value (#{subtype})") unless subtype == :template
  id_handle(template_id, :assembly_template)
end
-
-
1
# Returns the namespace request param, normalizing an empty string to nil.
# TODO: remove need for this by on client side not passing empty strings when no namespace
def ret_request_param_module_namespace?(param=:module_namespace)
  value = ret_request_params(param)
  return nil if value.kind_of?(String) and value.empty?
  value
end
-
-
1
# Config agent type from the request params, defaulting to :puppet.
# TODO: puppet hardwired
def ret_config_agent_type()
  ret_request_params(:config_agent_type) || :puppet
end
-
-
1
# Builds a diff summary from the :json_diffs request param.
# NOTE(review): when the param is missing or empty, the &&-chain short-circuits
# and Summary.new receives the falsey value (nil/false) instead of a parsed
# hash — presumably Summary handles that; verify against its constructor.
def ret_diffs_summary()
  json_diffs = ret_request_params(:json_diffs)
  Repo::Diffs::Summary.new(json_diffs && (!json_diffs.empty?) && JSON.parse(json_diffs))
end
-
-
1
# Remote repo base from the request params (falling back to the configured
# default), returned as a symbol.
def ret_remote_repo_base()
  base = ret_request_params(:remote_repo_base) || Repo::Remote.default_remote_repo_base()
  base.to_sym
end
-
# TODO: deprecate below when all uses removed;
# Remote repo from the request params (falling back to the default), as a symbol.
def ret_remote_repo()
  (ret_request_params(:remote_repo)||Repo::Remote.default_remote_repo()).to_sym
end
-
-
1
# Access rights parsed from the :access_rights request param,
# defaulting to read/write when the param is absent.
def ret_access_rights()
  rights = ret_request_params(:access_rights)
  return Repo::Remote::AccessRights::RW unless rights
  Repo::Remote::AccessRights.convert_from_string_form(rights)
end
-
-
1
# Id handle for the :library_id request param, or the public library's
# id handle when the param is absent.
def ret_library_idh_or_default()
  return ret_request_param_id_handle(:library_id, Library) if ret_request_params(:library_id)
  Library.get_public_library(model_handle(:library)).id_handle()
end
-
-
1
protected
-
-
1
# Resolves remote location info for the :module_id request param and returns
# the module's service dependencies from the remote catalog.
def resolve_pull_from_remote(module_type)
  repo_module = create_obj(:module_id)
  # optional :remote_namespace request param narrows the lookup
  opts = Opts.create?(:remote_namespace? => ret_request_params(:remote_namespace))
  module_name, namespace, version = repo_module.get_basic_info(opts)
  remote_params = remote_params_dtkn(module_type,namespace,module_name,version)
  client_rsa_pub_key = ret_request_params(:rsa_pub_key)

  get_service_dependencies(module_type, remote_params, client_rsa_pub_key)
end
-
-
1
private
-
-
1
# Maps a module-type (symbol or string) to its model class;
# raises Error for unknown types.
def module_class(module_type)
  mapping = {
    :component_module => ComponentModule,
    :service_module   => ServiceModule,
    :test_module      => TestModule,
    :node_module      => NodeModule
  }
  mapping[module_type.to_sym] || raise(Error.new("Unexpected module_type (#{module_type})"))
end
-
-
1
# Inverse of module_class: maps a module object to its module-type symbol;
# raises ErrorUsage for objects of any other class.
def module_type(component_module)
  [[ComponentModule, :component_module],
   [ServiceModule,   :service_module],
   [TestModule,      :test_module],
   [NodeModule,      :node_module]].each do |klass, type_sym|
    return type_sym if component_module.is_a?(klass)
  end
  raise ErrorUsage.new("Module type '#{component_module}' is not valid")
end
-
-
1
# Returns remote_namespace, falling back to the namespace of the module's
# default linked remote repo when the given one is blank.
# Raises ErrorUsage when no namespace can be determined.
def check_remote_namespace(remote_namespace, component_module)
  # robustness fix: treat nil like empty (previously a nil argument raised
  # NoMethodError on #empty? instead of falling back)
  if remote_namespace.nil? || remote_namespace.empty?
    linked_remote_repo = component_module.default_linked_remote_repo()
    remote_namespace = linked_remote_repo ? linked_remote_repo[:repo_namespace] : nil
    raise ErrorUsage.new("Not able to find linked remote namespace, please provide one") unless remote_namespace
  end
  remote_namespace
end
-
-
# override to include namespace in given calculations
# Resolves the named request param (a raw id or a "namespace/name" string)
# to a model object of model_class.
def create_obj(param, model_class=nil,extra_context=nil)
  id_or_name = ret_non_null_request_params(param)
  namespace_delimiter = ::DTK::Namespace.namespace_delimiter()
  # split off a leading namespace when the client passed "namespace/name"
  if id_or_name.include?(namespace_delimiter)
    namespace, id_or_name = id_or_name.split(namespace_delimiter)
  end

  # namespace is nil when no delimiter was present; an explicit extra_context wins
  id_resolved = resolve_id_from_name_or_id(id_or_name, model_class, extra_context || namespace)

  create_object_from_id(id_resolved, model_class)
end
-
-
1
# Materializes a model object directly from an already-resolved id
# (no name/namespace resolution, unlike create_obj).
def get_obj(id, model_class=nil)
  create_object_from_id(id, model_class)
end
-
-
end
-
end
-
-
1
# Rack async response body: an EventMachine deferrable whose chunks are
# pushed to the block that the server registers via #each.
class DeferrableBody
  include EventMachine::Deferrable

  # Pushes a chunk of body data to the registered callback.
  # NOTE(review): this shadows Object#send for these instances; dynamic
  # dispatch on a DeferrableBody must use __send__.
  def send(data)
    @body_callback.call data
  end

  # Rack/the server calls #each with a block; it is stored so chunks can be
  # streamed to it later via #send.
  def each(&blk)
    @body_callback = blk
  end
end
-
1
module Ramaze::Helper
  module NodeGroupHelper
    # Specializes the generic create_obj (defined in the controller helper
    # hierarchy) to the NodeGroup model.
    def create_obj(id_or_name_param)
      super(id_or_name_param,::DTK::NodeGroup)
    end
  end
end
-
1
module Ramaze::Helper
  module NodeHelper
    # Node model class matching the :subtype request param:
    # ::DTK::Node::Template for :template, ::DTK::Node otherwise.
    def ret_node_subtype_class()
      subtype = ret_node_params_subtype()
      if subtype == :template
        ::DTK::Node::Template
      else
        ::DTK::Node
      end
    end

    # Lists nodes for the requested subtype. For instances, the string param
    # :is_list_all == 'true' includes assembly nodes; otherwise they are
    # excluded from the listing.
    def ret_nodes_by_subtype_class(model_handle, opts = {})
      subtype = ret_node_params_subtype()
      if subtype == :template
        ::DTK::Node::Template.list(model_handle, opts)
      else
        if (opts[:is_list_all] == 'true')
          ::DTK::Node.list(model_handle, opts)
        else
          ::DTK::Node.list_wo_assembly_nodes(model_handle)
        end
      end
    end

    # :subtype request param as a symbol, defaulting to :instance.
    def ret_node_params_subtype()
      (ret_request_params(:subtype)||:instance).to_sym
    end

    # Resolves the given request param to a Node instance object.
    def create_node_obj(id_param)
      create_obj(id_param, ::DTK::Node)
    end

    # Resolves the given request param to a Node::Template object.
    def create_node_template_obj(id_param)
      create_obj(id_param,::DTK::Node::Template)
    end

    # Returns [node object, subtype symbol] derived from the request params.
    def ret_node_params_object_and_subtype()
      [create_obj(:node_id,ret_node_subtype_class()),ret_node_params_subtype()]
    end
  end
end
-
1
module Ramaze::Helper
  module ProcessSearchObject
    include XYZ
    private
    # fns that get _search_object

    # Builds a SearchObject from whichever source supplied a search pattern,
    # in precedence order: POST body, action-set params, GET query string.
    # Returns nil when no pattern hash could be found.
    def ret_search_object_in_request()
      source = hash = nil
      if request_method_is_post?()
        hash = ret_hash_search_object_in_post()
      end
      if hash #request_method_is_post and it has search pattern
        source = :post_request
      elsif @action_set_params and not @action_set_params.empty?
        source = :action_set
        hash = ret_hash_search_object_in_action_set_params(@action_set_params)
      else
        source = :get_request
        hash = ret_hash_search_object_in_get()
      end
      SearchObject.create_from_input_hash(hash,source,ret_session_context_id()) if hash
    end


    # Search pattern for GET requests: the controller's model, plus an
    # optional filter derived from the query string.
    def ret_hash_search_object_in_get()
      # TODO: stub; incomplete
      filter = ret_filter_when_get()
      hash_search_pattern = {
        :relation => model_name()
      }
      hash_search_pattern.merge!(:filter => filter) if filter
      {"search_pattern" => hash_search_pattern}
    end

    # AND-conjunction of equality clauses, one per query-string key (except
    # :parent_id); nil when there is nothing to filter on.
    def ret_filter_when_get()
      hash = (ret_parsed_query_string_when_get()||{}).reject{|k,v|k == :parent_id}
      return nil if hash.empty?
      [:and] + hash.map{|k,v|[:eq,k,v]}
    end

    # Action-set params carry the pattern hash directly under "search".
    def ret_hash_search_object_in_action_set_params(action_set_params)
      action_set_params["search"]
    end

    # Parses the JSON "search" field of a POST body; for REST requests the
    # pattern's relation defaults to the controller's model. Returns nil when
    # no (non-empty) search field was posted.
    def ret_hash_search_object_in_post()
      json_params = (ret_request_params()||{})["search"]
      if json_params and not json_params.empty?
        search_pattern = JSON.parse(json_params)
        if rest_request?()
          search_pattern["relation"] ||= model_name()
        end
        {"search_pattern" => search_pattern}
      end
    end
  end
end
-
1
module Ramaze::Helper
  module RemotesHelper

    # Lists url/provider info for the module's non-dtkn git remotes.
    def info_git_remote(module_obj)
      info = module_obj.get_linked_remote_repos.collect do |a|
        provider_name = a.git_provider_name
        # dtkn catalog entries are skipped (block yields nil; compacted below)
        unless ::DTK::RepoRemote::DTKN_PROVIDER.eql?(provider_name)
          a.merge(
            :url => a.git_remote_url,
            :git_provider => provider_name,
            :base_git_url => a.base_git_remote_url,
            :base_git_location => a.base_git_remote_location
          )
        end
      end

      rest_ok_response info.compact
    end

    # Registers remote_url for the given repo under a name derived from the
    # url's git provider.
    def add_git_url(repo_remote_mh, repo_id, remote_url)
      remote_name = ::DTK::RepoRemote.git_provider_name(remote_url)
      ::DTK::RepoRemote.create_git_remote(repo_remote_mh, repo_id, remote_name, remote_url)
    end

    # Adds a named git remote (name/url from request params) to the module's
    # workspace repo.
    def add_git_remote(module_obj)
      remote_name, remote_url = ret_non_null_request_params(:remote_name, :remote_url)
      repo_remote_mh = module_obj.model_handle(:repo_remote)

      response = ::DTK::RepoRemote.create_git_remote(repo_remote_mh, module_obj.get_workspace_repo.id, remote_name, remote_url)

      rest_ok_response response
    end

    # Removes the named git remote from the module's workspace repo.
    def remove_git_remote(module_obj)
      remote_name = ret_non_null_request_params(:remote_name)
      repo_remote_mh = module_obj.model_handle(:repo_remote)

      ::DTK::RepoRemote.delete_git_remote(repo_remote_mh, remote_name, module_obj.get_workspace_repo.id)

      rest_ok_response
    end

  end
end
-
1
module Ramaze::Helper
  # Helpers for building and serializing REST-style controller responses.
  module Rest
    # Serializes the bundled controller results as JSON; raises unless they
    # were bundled in rest form.
    def rest_response()
      unless @ctrl_results.kind_of?(BundleAndReturnHelper::ControllerResultsRest)
        raise Error.new("controller results are in wrong form; it should have 'rest' form")
      end

      JSON.generate(@ctrl_results)
    end

    # Builds an ok RestResponse wrapping data (defaults to an empty hash).
    # opts:
    #   :encode_into - currently only :yaml; encodes data before wrapping
    #   :datatype    - copied into the payload when present
    #   :info/:warn/:error - custom messages copied into the payload
    def rest_ok_response(data=nil,opts={})
      data ||= Hash.new
      if encode_format = opts[:encode_into]
        # This might be a misnomer in that payload is still a hash which then in RestResponse.new becomes json
        # for case of yaml, the data will be a string formed by yaml encoding
        data =
          case encode_format
           when :yaml then encode_into_yaml(data)
           else raise Error.new("Unexpected encode format (#{encode_format})")
          end
      end

      payload = { :status => :ok, :data => data}
      payload.merge!(:datatype => opts[:datatype]) if opts[:datatype]

      # set custom messages in response
      [:info, :warn, :error].each do |msg_type|
        payload.merge!(msg_type => opts[msg_type]) if opts[msg_type]
      end

      RestResponse.new(payload)
    end

    #
    # Actions needed is Array of Hashes with following attributes:
    #
    # :action => Name of action to be executed
    # :params => Parameters needed to execute that action
    # :wait_for_complete => In case we need to wait for end of that action, type and id
    #                       It will call task_status for given entity.
    # Example:
    #[
    #  :action            => :start,
    #  :params            => {:assembly_id => assembly[:id]},
    #  :wait_for_complete => {:type => :assembly, :id => assembly[:id]}
    #]

    # Builds a :notok response instructing the client which actions to run.
    def rest_validate_response(message, actions_needed)
      RestResponse.new({
        :status => :notok,
        :validation =>
          {
            :message => message,
            :actions_needed => actions_needed
          }
      })
    end

    # Builds a :notok response from one error hash or an array of them.
    def rest_notok_response(errors=[{:code => :error}])
      if errors.kind_of?(Hash)
        errors = [errors]
      end
      RestResponse.new(:status => :notok, :errors => errors)
    end

    private
    # YAML-encodes data (optionally stripping nil-valued keys first),
    # with a trailing newline.
    def encode_into_yaml(data,opts={})
      data_to_encode = data
      if opts[:remove_null_keys]
        data_to_encode = remove_null_keys(data)
      end
      ::DTK::Aux.serialize(data_to_encode,:yaml) + "\n"
    end

    # Recursively removes hash entries whose value is nil.
    def remove_null_keys(data)
      if data.kind_of?(Hash)
        ret = Hash.new
        data.each_pair{|k,v|ret[k]=remove_null_keys(v) unless v.nil?}
        ret
      elsif data.kind_of?(Array)
        data.map{|el|remove_null_keys(el)}
      else
        data
      end
    end

    # Hash subclass representing a REST payload ({:status => ..., ...}).
    class RestResponse < Hash
      def initialize(hash)
        replace(hash)
      end
      def is_ok?
        self[:status] == :ok
      end
      def data()
        self[:data]
      end
    end
  end
end
-
1
module Ramaze::Helper
  module RestAsync
    # Runs blk asynchronously, sending its eventual result through the Rack
    # async callback as a pretty-printed rest ok/notok JSON response.
    def rest_deferred_response(&blk)
      response_procs = {
        :ok => lambda do |data|
          JSON.pretty_generate(rest_ok_response(data))
        end,
        :notok => lambda do |error|
          error_hash = ::DTK::RestError.create(error).hash_form()
          JSON.pretty_generate(rest_notok_response(error_hash))
        end
      }
      # 'async.callback' is supplied by async-capable Rack servers (e.g. thin)
      async_callback = request.env['async.callback']
      content_type = 'text/html'
      ::DTK::AsyncResponse.create(async_callback,content_type,response_procs,&blk)
    end
  end
end
-
1
module Ramaze::Helper
  module TargetHelper

    # :subtype request param as a symbol, defaulting to :instance.
    def ret_target_subtype()
      (ret_request_params(:subtype)||:instance).to_sym
    end

    # Validated iaas type (symbol) from the request; raises ErrorUsage for
    # unsupported types.
    # NOTE(review): validity is checked against the downcased form, but the
    # returned symbol keeps the caller's original casing — confirm intended.
    def ret_iaas_type(iaas_type_field=:iaas_type)
      iaas_type = (ret_non_null_request_params(iaas_type_field)).to_sym
      # check iaas type is valid
      supported_types = ::R8::Config[:ec2][:iaas_type][:supported]
      unless supported_types.include?(iaas_type.to_s.downcase)
        raise ::DTK::ErrorUsage.new("Invalid iaas type '#{iaas_type}', supported types (#{supported_types.join(', ')})")
      end
      iaas_type
    end
  end
end
-
1
module Ramaze::Helper
  module TaskHelper
    # Cancels the workflow for the given top-level task id.
    def cancel_task(top_task_id)
      task = ::DTK::Task.get_hierarchical_structure(id_handle(top_task_id,:task))
      ::DTK::Workflow.cancel(top_task_id, task)
    end

    # Most recent top-level task still in 'executing' status, optionally
    # narrowed by an additional filter clause.
    def get_most_recent_executing_task(filter=nil)
      aug_filter = [:and,filter,[:eq,:status,'executing']].compact
      get_most_recent_task(aug_filter)
    end

    # Most recent top-level task matching the optional filter.
    def get_most_recent_task(filter=nil)
      ::DTK::Task.get_top_level_most_recent_task(model_handle(:task),filter)
    end
  end
end
-
1
module Ramaze::Helper
  module VersionHelper
    # Parses the :version request param into a ModuleVersion; returns nil when
    # the param is absent and raises BadVersionValue for unparsable strings.
    def ret_version()
      if version_string = ret_request_params(:version)
        unless version = ::DTK::ModuleVersion.ret(version_string)
          raise ::DTK::ErrorUsage::BadVersionValue.new(version_string)
        end
        version
      end
    end
  end
end
-
1
module DTK
  # Model for a component action definition: a named action plus its
  # serialized content (commands/functions).
  class ActionDef < Model
    r8_nested_require('action_def','content')
    def self.common_columns()
      core_columns()+[:method_name,:content,:component_component_id]
    end

    module Constant
      module Variations
      end
      # NOTE(review): 'ParsingingHelper' looks like a typo for 'ParsingHelper',
      # but it must match the helper module's actual name — verify before renaming.
      extend Aux::ParsingingHelper::ClassMixin
      CreateActionName = 'create'
    end

    # Returns action defs for the given component-template id handles,
    # indexed by :component_component_id. opts[:cols] narrows the columns
    # fetched (core columns are always included).
    def self.get_ndx_action_defs(cmp_template_idhs,opts={})
      ret = Hash.new
      return ret if cmp_template_idhs.empty?

      sp_hash = {
        :cols => opts[:cols] ? (opts[:cols]+core_columns()+[:component_component_id]).uniq : common_columns(),
        :filter => [:oneof,:component_component_id,cmp_template_idhs.map{|cmp|cmp.get_id{}}]
      }
      action_def_mh = cmp_template_idhs.first.createMH(:action_def)
      get_objs(action_def_mh,sp_hash).each do |ad|
        (ret[ad[:component_component_id]] ||= Array.new) << ad
      end
      ret
    end
    ColsToInclude = [:id,:group_id,:component_component_id]

    # Reified command objects from this action def's content.
    def commands()
      parse_and_reify_content?().commands()
    end

    # Reified function objects from this action def's content.
    def functions()
      parse_and_reify_content?().functions()
    end

    # if parse does not go through; raises parse error
    def self.parse(hash_content)
      Content.parse(hash_content)
    end

    private
    # Lazily converts the raw :content hash into a parsed Content object,
    # caching the result back into self[:content].
    # NOTE(review): the error message below is missing its closing paren.
    def parse_and_reify_content?()
      content = get_field?(:content)
      unless content.kind_of?(Content)
        if content.kind_of?(Hash)
          hash_content = content
          self[:content] = Content.parse(hash_content)
        else
          raise Error.new("Unexpected class type (#{content.class}")
        end
      end
      self[:content]
    end
  end
end
-
2
module DTK; class ActionDef
  # Top class for content classes which as hash part store raw form and then have
  # instance attributes for the parsed form
  class Content < Hash
    r8_nested_require('content','constant')
    r8_nested_require('content','command')
    r8_nested_require('content','template_processor')

    attr_reader :commands, :functions

    def initialize(hash_content)
      super()
      replace(hash_content)
    end

    # Parses a raw hash into a reified Content instance.
    def self.parse(hash)
      new(hash).parse_and_reify!()
    end

    # Reifies the raw :commands and :functions entries into Command objects,
    # populating the corresponding readers. Returns self.
    def parse_and_reify!()
      @commands  = reify_command_list(self[Constant::Commands])
      @functions = reify_command_list(self[Constant::Functions])
      self
    end

    private

    # Parses each serialized command in list (nil treated as empty).
    def reify_command_list(list)
      (list || []).map { |serialized| Command.parse(serialized) }
    end
  end
end; end
-
3
module DTK; class ActionDef; class Content
  class Command
    r8_nested_require('command','syscall')
    r8_nested_require('command','file_positioning')
    r8_nested_require('command','ruby_function')

    # Tries each parser subclass in order; the first successful parse wins.
    # TODO: stub
    def self.parse(serialized_command)
      parsed = Syscall.parse?(serialized_command)
      parsed ||= FilePositioning.parse?(serialized_command)
      parsed ||= RubyFunction.parse?(serialized_command)
      # TODO: bring in dtk model parsing parse error class
      parsed || raise(Error.new("Parse Error: #{serialized_command.inspect}"))
    end

    # True when this command is a system call.
    def is_syscall?()
      is_a?(Syscall)
    end

  end
end; end; end
-
-
3
module DTK; class ActionDef; class Content
  class Command
    # Command that positions files; parsing is not implemented yet.
    class FilePositioning < self
      # TODO: stub — never matches (always returns nil)
      def self.parse?(serialized_command)
      end

      # Type tag for this command family.
      def type
        'file'
      end
    end
  end
end; end; end
-
3
module DTK; class ActionDef; class Content
  class Command
    # Command whose output attributes are computed by user-supplied Ruby lambdas.
    class RubyFunction < self
      # Hash of {dynamic_attr_name => ruby source string evaluating to a lambda}.
      attr_reader :ruby_function

      def needs_template_substitution?
        @needs_template_substitution
      end

      def initialize(ruby_function)
        @ruby_function = ruby_function
      end

      # Evaluates each stored function and appends
      # { attribute_id:, attribute_val: } entries to dyn_attrs.
      # Returns { error: e } and stops on SecurityError.
      # NOTE(review): eval of stored strings executes arbitrary code; the
      # $SAFE = 4 sandbox was removed in Ruby >= 2.1 (assigning 4 raises
      # there), so this protection only holds on legacy 1.8/1.9 interpreters —
      # verify the target runtime before relying on it.
      def process_function_assign_attrs(attrs, dyn_attrs)
        @ruby_function.each_pair do |d_attr, fn|
          begin
            evaluated_fn = proc do
              $SAFE = 4
              eval(fn)
            end.call

            # attribute id looked up by display name; {} fallback yields nil id
            attr_id = (attrs.find { |a| a[:display_name].eql?(d_attr.to_s) } || {})[:id]
            attr_val = calculate_dyn_attr_value(evaluated_fn, attrs)
            dyn_attrs << { attribute_id: attr_id, attribute_val: attr_val }
          rescue SecurityError => e
            pp [e, e.backtrace[0..5]]
            return { error: e }
          end
        end
      end

      # Matches serialized hashes that carry an :outputs key.
      def self.parse?(serialized_command)
        if serialized_command.is_a?(Hash) && serialized_command.key?(:outputs)
          ruby_function = serialized_command[:outputs]
          new(ruby_function)
        end
      end

      # Calls the evaluated lambda with attribute values bound by parameter
      # name; only lambdas are supported.
      def calculate_dyn_attr_value(evaluated_fn, attrs)
        value = nil
        parsed_attrs = parse_attributes(attrs)
        if evaluated_fn.is_a?(Proc) && evaluated_fn.lambda?
          params = process_lambda_params(evaluated_fn, parsed_attrs)
          value = evaluated_fn.call(*params)
        else
          raise Error.new('Currently only lambda functions are supported')
        end
        value
      end

      # Looks up each lambda parameter's value by name in parsed_attrs
      # (pm is [kind, name]; missing names yield nil).
      def process_lambda_params(lambda_fn, parsed_attrs)
        ret_params = []
        lambda_fn.parameters.each do |pm|
          ret_params << parsed_attrs[pm[1].to_s]
        end
        ret_params
      end

      # Maps display_name => asserted-or-derived value, coercing attrs whose
      # data_type is 'integer' with to_i.
      def parse_attributes(attrs)
        parsed_attrs = {}
        attrs.each do |attr|
          if attr[:data_type].eql?('integer')
            parsed_attrs[attr[:display_name]] = (attr[:value_asserted] || attr[:value_derived]).to_i
          else
            parsed_attrs[attr[:display_name]] = attr[:value_asserted] || attr[:value_derived]
          end
        end
        parsed_attrs
      end

      # Type tag for this command family.
      def type
        'ruby_function'
      end
    end
  end
end; end; end
-
3
module DTK; class ActionDef; class Content
  class Command
    # Command that runs a shell command line; serialized form is "RUN <cmd>".
    class Syscall < self
      r8_nested_require('syscall','interpret_results')

      attr_reader :command_line
      # True while the command line still contains unexpanded template vars.
      def needs_template_substitution?()
        @needs_template_substitution
      end
      def initialize(raw_form,command_line)
        @raw_form = raw_form
        @command_line = command_line
        @template_processor = Content::TemplateProcessor.default() # TODO: changed when have multiple choices for template processors
        # !! normalizes the processor's answer to a strict boolean
        @needs_template_substitution = !!@template_processor.needs_template_substitution?(command_line)
      end

      # Matches strings of the form "RUN <command line>"
      # (Constant::Command::RunRegexp); $1 is the capture from that match.
      def self.parse?(serialized_command)
        if serialized_command.kind_of?(String) and serialized_command =~ Constant::Command::RunRegexp
          command_line = $1
          new(serialized_command,command_line)
        end
      end

      # Substitutes attr_val_pairs into the command line and marks the command
      # fully expanded. Returns self.
      def bind_template_attributes!(attr_val_pairs)
        @command_line = @template_processor.bind_template_attributes(@command_line, attr_val_pairs)
        @needs_template_substitution = false
        self
      end

      # Type tag for this command family.
      def type()
        'syscall'
      end
    end
  end
end; end; end
-
3
module DTK; class ActionDef; class Content
  # Keys and regexps used when parsing serialized action-def content.
  module Constant
    Commands = :commands
    Functions = :functions
    module Command
      # Captures the command line following the RUN keyword.
      RunRegexp = /^RUN\s+(.+$)/
    end
  end
end; end; end
-
3
module DTK; class ActionDef; class Content
  # Factory for command-line template processors.
  class TemplateProcessor
    # TODO: hard wired in mustache
    r8_nested_require('template_processor/adapter','mustache_template')
    # The default processor used for command-line substitution.
    def self.default()
      MustacheTemplate.new()
    end
  end
end; end; end
-
3
module DTK; class ActionDef; class Content
  class TemplateProcessor
    # Mustache-based command-line template processor.
    class MustacheTemplate < self
      include MustacheTemplateMixin

      # Substitutes attr_val_pairs into the mustache template command_line,
      # translating template errors into user-facing ErrorUsage exceptions.
      def bind_template_attributes(command_line, attr_val_pairs)
        bind_template_attributes_utility(command_line, attr_val_pairs)
      rescue MustacheTemplateError::MissingVar => e
        indent = ' ' * 4
        raise ErrorUsage.new("The mustache variable '#{e.missing_var}' in the following command is not set:\n#{indent}#{command_line}")
      rescue MustacheTemplateError => e
        raise ErrorUsage.new("Template error in command (#{command_line}): #{e.error_message}")
      end

    end
  end
end; end; end
-
# TODO: finish moving the fns and mixins that relate just to template or instance to these files
-
1
module DTK
-
1
class Assembly < Component
-
1
r8_nested_require('assembly','list')
-
1
r8_nested_require('assembly','template')
-
1
r8_nested_require('assembly','instance')
-
1
include ListMixin
-
1
extend ListClassMixin
-
-
1
def self.get_these_objs(mh,sp_hash,opts={})
-
Model.get_objs(mh.createMH(:component),sp_hash,opts).map{|cmp|create_from_component(cmp)}
-
end
-
1
def self.create_from_component(cmp)
-
cmp && create_from_id_handle(cmp.id_handle()).merge(cmp)
-
end
-
-
### standard get methods
-
1
def get_assembly_level_attributes(filter_proc=nil)
-
sp_hash = {
-
:cols => [:id,:display_name,:attribute_value,:data_type],
-
:filter => [:eq,:component_component_id, id()]
-
}
-
ret = Model.get_objs(model_handle(:attribute),sp_hash)
-
if filter_proc
-
ret.select{|r| filter_proc.call(r)}
-
else
-
ret
-
end
-
end
-
-
1
def get_service_module()
-
get_obj_helper(:service_module)
-
end
-
-
1
def get_namespace()
-
service_module = get_service_module()
-
-
sp_hash = {
-
:cols => [:id, :display_name],
-
:filter => [:eq, :id, service_module[:namespace_id]]
-
}
-
-
namespace = Model.get_obj(model_handle(:namespace),sp_hash)
-
end
-
-
1
def get_port_links(opts={})
-
filter = [:eq,:assembly_id,id()]
-
if opts[:filter]
-
filter = [:and,filter,opts[:filter]]
-
end
-
sp_hash = {
-
:cols => opts[:cols]||PortLink.common_columns(),
-
:filter => filter
-
}
-
Model.get_objs(model_handle(:port_link),sp_hash)
-
end
-
-
1
def get_matching_port_link(filter)
-
opts = {:filter => filter, :ret_match_info => Hash.new}
-
matches = get_augmented_port_links(opts)
-
case matches.size
-
when 1
-
matches.first
-
when 0
-
raise ErrorUsage.new("Cannot find component link#{error_message_condition(opts[:ret_match_info])}")
-
else
-
raise ErrorUsage.new("Multiple matching component links#{error_message_condition(opts[:ret_match_info])}")
-
end
-
end
-
-
1
def error_message_condition(match_info)
-
if clause = (match_info||{})[:clause]
-
" with condition (#{clause})"
-
else
-
""
-
end
-
end
-
1
private :error_message_condition
-
-
# augmented with the ports and nodes; component_id is on ports
-
1
def get_augmented_port_links(opts={})
-
rows = get_objs(:cols => [:augmented_port_links])
-
# TODO: remove when have all create port link calls set port_link display name to service type
-
rows.each{|r|r[:port_link][:display_name] ||= r[:input_port].link_def_name()}
-
if filter = opts[:filter]
-
post_filter = port_link_filter_lambda_form(filter,opts)
-
rows.reject!{|r|!post_filter.call(r)}
-
end
-
rows.map do |r|
-
r[:port_link].merge(r.slice(:input_port,:output_port,:input_node,:output_node))
-
end
-
end
-
-
-
1
def port_link_filter_lambda_form(filter,opts={})
-
if Aux.has_just_these_keys?(filter,[:port_link_id])
-
port_link_id = filter[:port_link_id]
-
if opts[:ret_match_info]
-
opts[:ret_match_info][:clause] = "port_link_id = #{port_link_id.to_s}"
-
end
-
lambda{|r|r[:port_link][:id] == port_link_id}
-
elsif Aux.has_just_these_keys?(filter,[:input_component_id])
-
input_component_id = filter[:input_component_id]
-
# not setting opts[:ret_match_info][:clause] because :input_component_id internally generated
-
lambda{|r|r[:input_port][:component_id] == input_component_id}
-
elsif Aux.has_only_these_keys?(filter,[:service_type,:input_component_id,:output_component_id])
-
unless input_component_id = filter[:input_component_id]
-
raise Error.new("Unexpected filter (#{filter.inspect})")
-
end
-
output_component_id = filter[:output_component_id]
-
service_type = filter[:service_type]
-
# not including conjunct with :input_component_id or output_component_id because internally generated
-
if opts[:ret_match_info] and service_type
-
opts[:ret_match_info][:clause] = "service_type = '#{service_type}'"
-
end
-
lambda do |r|
-
(r[:input_port][:component_id] == input_component_id) and
-
(service_type.nil? or (r[:port_link][:display_name] == service_type)) and
-
(output_component_id.nil? or (r[:output_port][:component_id] == output_component_id))
-
end
-
else
-
raise Error.new("Unexpected filter (#{filter.inspect})")
-
end
-
end
-
1
private :port_link_filter_lambda_form
-
-
# MOD_RESTRUCT: this must be removed or changed to reflect more advanced relationship between component ref and template
-
1
def self.get_component_templates(assembly_mh,filter=nil)
-
sp_hash = {
-
:cols => [:id, :display_name,:component_type,:component_templates],
-
:filter => [:and, [:eq, :type, "composite"], [:neq, :library_library_id, nil], filter].compact
-
}
-
assembly_rows = get_objs(assembly_mh,sp_hash)
-
assembly_rows.map{|r|r[:component_template]}
-
end
-
-
# this can be overwritten
-
1
def self.get_component_attributes(assembly_mh,template_assembly_rows,opts={})
-
Array.new
-
end
-
-
### end: standard get methods
-
-
1
def self.get_default_component_attributes(assembly_mh,assembly_rows,opts={})
-
ret = Array.new
-
cmp_ids = assembly_rows.map{|r|(r[:nested_component]||{})[:id]}.compact
-
return ret if cmp_ids.empty?
-
-
# by defalut do not include derived values
-
cols = [:id,:display_name,:value_asserted,:component_component_id,:is_instance_value] + (opts[:include_derived] ? [:value_derived] : [])
-
sp_hash = {
-
:cols => cols,
-
:filter => [:oneof, :component_component_id,cmp_ids]
-
}
-
Model.get_objs(assembly_mh.createMH(:attribute),sp_hash)
-
end
-
-
1
def set_attributes(av_pairs,opts={})
-
# return attr_patterns
-
Attribute::Pattern::Assembly.set_attributes(self,av_pairs,opts)
-
end
-
-
1
def self.ret_component_type(service_module_name,assembly_name)
-
"#{service_module_name}__#{assembly_name}"
-
end
-
-
1
def self.pretty_print_version(assembly)
-
assembly[:version] && ModuleBranch.version_from_version_field(assembly[:version])
-
end
-
-
1
def are_nodes_running_in_task?()
-
nodes = get_nodes(:id)
-
running_nodes = Task::Status::Assembly.get_active_nodes(model_handle())
-
-
return false if running_nodes.empty?
-
interrsecting_nodes = (running_nodes.map(&:id) & nodes.map(&:id))
-
-
!interrsecting_nodes.empty?
-
end
-
-
1
def self.is_template?(assembly_idh)
-
assembly_idh.create_object().is_template?()
-
end
-
1
def is_template?()
-
not update_object!(:library_library_id)[:library_library_id].nil?
-
end
-
-
#### for cloning
-
1
def add_model_specific_override_attrs!(override_attrs,target_obj)
-
override_attrs[:display_name] ||= SQL::ColRef.qualified_ref
-
override_attrs[:updated] ||= false
-
end
-
-
##############
-
# TODO: looks like callers dont need all the detail; might just provide summarized info or instead pass arg that specifies sumamry level
-
# also make optional whether materialize
-
1
def get_node_assembly_nested_objects()
-
ndx_nodes = Hash.new
-
sp_hash = {:cols => [:instance_nodes_and_cmps]}
-
node_col_rows = get_objs(sp_hash)
-
node_col_rows.each do |r|
-
if node = r[:node]
-
n = node.materialize!(Node.common_columns)
-
node = ndx_nodes[n[:id]] ||= n.merge(:components => Array.new)
-
node[:components] << r[:nested_component].materialize!(Component.common_columns())
-
end
-
end
-
-
nested_node_ids = ndx_nodes.keys
-
sp_hash = {
-
:cols => Port.common_columns(),
-
:filter => [:oneof, :node_node_id, nested_node_ids]
-
}
-
port_rows = Model.get_objs(model_handle(:port),sp_hash)
-
port_rows.each do |r|
-
node = ndx_nodes[r[:node_node_id]]
-
(node[:ports] ||= Array.new) << r.materialize!(Port.common_columns())
-
end
-
port_links = get_port_links()
-
port_links.each{|pl|pl.materialize!(PortLink.common_columns())}
-
-
{:nodes => ndx_nodes.values, :port_links => port_links}
-
end
-
-
1
def is_assembly?()
-
true
-
end
-
1
def assembly?(opts={})
-
if opts[:subclass_object]
-
self.class.create_assembly_subclass_object(self)
-
else
-
self
-
end
-
end
-
1
def self.create_assembly_subclass_object(obj)
-
obj.update_object!(:datacenter_datacenter_id)
-
subclass_model_name = (obj[:datacenter_datacenter_id] ? :assembly_instance : :assembly_template)
-
create_subclass_object(obj,subclass_model_name)
-
end
-
-
1
def get_component_with_attributes_unraveled(attr_filters={})
-
attr_vc = "#{assembly_type()}_assembly_attributes".to_sym
-
sp_hash = {:columns => [:id,:display_name,:component_type,:basic_type,attr_vc]}
-
component_and_attrs = get_objects_from_sp_hash(sp_hash)
-
return nil if component_and_attrs.empty?
-
sample = component_and_attrs.first
-
# TODO: hack until basic_type is populated
-
# component = sample.subset(:id,:display_name,:component_type,:basic_type)
-
component = sample.subset(:id,:display_name,:component_type).merge(:basic_type => "#{assembly_type()}_assembly")
-
node_attrs = {:node_id => sample[:node][:id], :node_name => sample[:node][:display_name]}
-
filtered_attrs = component_and_attrs.map do |r|
-
attr = r[:attribute]
-
if attr and not attribute_is_filtered?(attr,attr_filters)
-
cmp = r[:sub_component]
-
cmp_attrs = {:component_type => cmp[:component_type],:component_name => cmp[:display_name]}
-
attr.merge(node_attrs).merge(cmp_attrs)
-
end
-
end.compact
-
attributes = AttributeComplexType.flatten_attribute_list(filtered_attrs)
-
component.merge(:attributes => attributes)
-
end
-
1
# Kind of assembly this is; used to build virtual-column names such as
# :node_assembly_attributes.
def assembly_type()
  # TODO: stub; may use basic_type to distinguish between component and node assemblies
  :node
end
-
1
private :assembly_type
-
end
-
end
-
2
module DTK; class Assembly
-
1
class Instance < self
-
1
r8_nested_require('instance','service_link_mixin')
-
1
r8_nested_require('instance','service_link')
-
1
r8_nested_require('instance','action')
-
1
r8_nested_require('instance','violation')
-
1
r8_nested_require('instance','update')
-
1
r8_nested_require('instance','list')
-
1
r8_nested_require('instance','get')
-
1
r8_nested_require('instance','delete')
-
1
r8_nested_require('instance','service_setting')
-
1
r8_nested_require('instance','op_status')
-
1
include ServiceLinkMixin
-
1
include ViolationMixin
-
1
include ListMixin
-
1
extend ListClassMixin
-
1
include DeleteMixin
-
1
extend DeleteClassMixin
-
1
include GetMixin
-
1
extend GetClassMixin
-
1
include OpStatus::Mixin
-
1
extend OpStatus::ClassMixin
-
-
1
# Builds an assembly-instance model object from an id handle.
def self.create_from_id_handle(idh)
  idh.create_object(:model_name => :assembly_instance)
end
-
-
1
# Renames this assembly instance to new_name, rejecting the reserved name
# 'workspace' and any name already used by another assembly instance.
def rename(assembly_mh, name, new_name)
  existing = Assembly::Instance.list(assembly_mh)
  if new_name.to_s.eql?("workspace")
    raise ErrorUsage.new("You are not allowed to use keyword '#{new_name}' as #{pp_object_type()} name")
  end

  existing.each do |assembly|
    if assembly[:display_name].to_s.eql?(new_name)
      raise ErrorUsage.new("#{pp_object_type().cap} with name '#{new_name}' exists already")
    end
  end

  update(:display_name => new_name)
end
-
-
1
# Deletes this assembly's tasks and returns their id handles. Tasks whose
# status is 'executing' are kept unless opts[:include_executing_task] is set.
def clear_tasks(opts={})
  get_opts = {}
  unless opts[:include_executing_task]
    # skip tasks still running
    get_opts[:filter_proc] = lambda { |r| r[:task][:status] != 'executing' }
  end
  task_idhs = get_tasks(get_opts).map { |task| task.id_handle() }
  Model.delete_instances(task_idhs) unless task_idhs.empty?
  task_idhs
end
-
-
1
# Instance-level wrapper: class-level flat info list scoped to this id.
def get_info__flat_list(opts={})
  scoped = {:filter => [:eq,:id,id()]}.merge(opts)
  self.class.get_info__flat_list(model_handle(), scoped)
end
-
-
1
# Instance-level wrapper: removes this assembly's component-less nodes
# from the given nodes list.
def remove_empty_nodes(nodes, opts={})
  scoped = {:filter => [:eq,:id,id()]}.merge(opts)
  self.class.remove_empty_nodes(model_handle(), nodes, scoped)
end
-
-
1
# Removes from +nodes+ (in place) any assembly nodes that have no components.
# opts[:target_idh] restricts the scan to one target; otherwise any
# instantiated assembly (non-nil datacenter id) is considered.
# Returns the (mutated) nodes array.
def self.remove_empty_nodes(assembly_mh, nodes, opts={})
  target_idh = opts[:target_idh]
  target_filter = (target_idh ? [:eq, :datacenter_datacenter_id, target_idh.get_id()] : [:neq, :datacenter_datacenter_id, nil])
  filter = [:and, [:eq, :type, "composite"], target_filter, opts[:filter]].compact
  # second element (whether empty nodes are needed) is unused here
  col, _needs_empty_nodes = list_virtual_column?(opts[:detail_level])
  cols = [:id,:ref,:display_name,:group_id,:component_type,:version,:created_at,col].compact
  ret = get(assembly_mh,{:cols => cols}.merge(opts))

  # ids of nodes that do have components
  nodes_ids = ret.map{|r|(r[:node]||{})[:id]}.compact
  sp_hash = {
    :cols => [:id, :display_name,:component_type,:version,:instance_nodes_and_assembly_template],
    :filter => filter
  }
  # assembly rows whose node is not among the nodes with components
  assembly_empty_nodes = get_objs(assembly_mh,sp_hash).reject{|r|nodes_ids.include?((r[:node]||{})[:id])}

  assembly_empty_nodes.each do |en|
    if node = en[:node]
      nodes.delete_if{|n| n[:id] == node[:id]}
    end
  end

  nodes
end
-
-
# returns column plus whether need to pull in empty assembly nodes (assembly nodes w/o any components)
# [col,empty_assem_nodes]
def self.list_virtual_column?(detail_level=nil)
  case detail_level
  when nil
    [nil, false]
  when "nodes", "components"
    # TODO: use below for component detail and introduce a more succinct one for nodes
    [:instance_nodes_and_cmps_summary, true]
  else
    raise Error.new("not implemented list_virtual_column at detail level (#{detail_level})")
  end
end
-
1
private_class_method :list_virtual_column?
-
-
1
# Adds a node to this assembly instance by cloning a matching node template
# into the target. With opts[:assembly_wide] the hidden assembly-wide node
# (used to attach components directly to the service instance) is created
# instead. Returns the new node's id handle, or nil when the clone fails.
def add_node(node_name, node_binding_rs=nil, opts={})
  # assembly-wide nodes are matched by type; regular nodes by display name
  dup_check = opts[:assembly_wide] ? [:eq, :type, 'assembly_wide'] : [:eq, :display_name, node_name]

  # refuse duplicates
  if get_node?(dup_check)
    raise ErrorUsage.new("Node (#{node_name}) already belongs to #{pp_object_type} (#{get_field?(:display_name)})")
  end

  target = get_target()
  node_template = Node::Template.find_matching_node_template(target, :node_binding_ruleset => node_binding_rs)

  override_attrs = {:display_name => node_name, :assembly_id => id()}
  override_attrs[:type] = 'assembly_wide' if opts[:assembly_wide]

  new_obj = target.clone_into(node_template, override_attrs, node_template.source_clone_info_opts())
  new_obj && new_obj.id_handle()
end
-
-
1
# Adds a staged node group of the given cardinality to this assembly
# instance; group members are created immediately. Returns the new node
# group's id handle, or nil when the clone fails.
def add_node_group(node_group_name, node_binding_rs, cardinality)
  # refuse duplicate names
  if get_node?([:eq,:display_name, node_group_name])
    raise ErrorUsage.new("Node (#{node_group_name}) already belongs to #{pp_object_type} (#{get_field?(:display_name)})")
  end

  target = get_target()
  node_template = Node::Template.find_matching_node_template(target, :node_binding_ruleset => node_binding_rs)

  self.update_object!(:display_name)
  ref = SQL::ColRef.concat("assembly--", "#{self[:display_name]}--#{node_group_name}")

  override_attrs = {
    :display_name => node_group_name,
    :assembly_id  => id(),
    :type         => "node_group_staged",
    :ref          => ref
  }

  new_obj = target.clone_into(node_template, override_attrs, node_template.source_clone_info_opts())
  Node::NodeAttribute.create_or_set_attributes?([new_obj], :cardinality, cardinality)

  # materialize the group members to match the requested cardinality
  node_group_obj = new_obj.create_obj_optional_subclass()
  node_group_obj.add_group_members(cardinality.to_i)

  new_obj && new_obj.id_handle()
end
-
-
# aug_cmp_template is a component template augmented with keys having objects
#  :module_branch
#  :component_module
#  :namespace
# opts can have
#  :idempotent
#  :donot_update_workflow
#
# Adds a component instance onto a node of this assembly instance, updating
# the module refs and (unless suppressed) the workflow task template.
# Returns the new component instance's id handle.
def add_component(node_idh, aug_cmp_template, component_title, opts={})
  # if node_idh it means we call add component from node context
  # else we call from service instance/workspace and use assembly_wide node
  if node_idh
    # first check that node_idh is directly attached to the assembly instance
    # one reason it may not be is if its a node group member
    sp_hash = {
      :cols => [:id, :display_name,:group_id, :ordered_component_ids],
      :filter => [:and, [:eq, :id, node_idh.get_id()], [:eq, :assembly_id, id()]]
    }

    unless node = Model.get_obj(model_handle(:node),sp_hash)
      if node_group = is_node_group_member?(node_idh)
        raise ErrorUsage.new("Not implemented: adding a component to a node group member; a component can only be added to the node group (#{node_group[:display_name]}) itself")
      else
        raise ErrorIdInvalid.new(node_idh.get_id(),:node)
      end
    end
  else
    # create (or reuse) the hidden assembly-wide node
    node = create_assembly_wide_node?()
  end

  cmp_instance_idh = nil

  # component add, module-ref update and workflow update are atomic
  Transaction do
    cmp_instance_idh = node.add_component(aug_cmp_template, opts.merge(:component_title => component_title))
    add_component__update_component_module_refs?(aug_cmp_template[:component_module],aug_cmp_template[:namespace])
    unless opts[:donot_update_workflow]
      Task::Template::ConfigComponents.update_when_added_component?(self,node,cmp_instance_idh.create_object(),component_title,:skip_if_not_found => true)
    end
  end
  cmp_instance_idh
end
-
-
1
# Ensures this assembly's service branch records a module ref for the
# component module (with its namespace), persisting only when the ref set
# actually changed.
def add_component__update_component_module_refs?(component_module,namespace)
  assembly_branch = AssemblyModule::Service.get_or_create_assembly_branch(self)
  assembly_branch.set_dsl_parsed!(true)
  component_module_refs = ModuleRefs.get_component_module_refs(assembly_branch)
  cmp_modules_with_namespaces = component_module.merge(:namespace_name => namespace[:display_name])
  if update_needed = component_module_refs.update_object_if_needed!([cmp_modules_with_namespaces])
    # This saves the update to the object model
    component_module_refs.update()
  end
end
-
1
private :add_component__update_component_module_refs?
-
-
1
# Returns this instance's assembly-wide node, creating it first when it
# does not exist yet.
def create_assembly_wide_node?()
  sp_hash = {
    :cols   => [:id, :display_name, :group_id, :ordered_component_ids],
    :filter => [:and, [:eq, :type, 'assembly_wide'], [:eq, :assembly_id, id()]]
  }
  existing = Model.get_obj(model_handle(:node), sp_hash)
  return existing if existing

  node_idh = add_node('assembly_wide', nil, {:assembly_wide => true})
  node_idh.create_object()
end
-
-
1
# Returns the assembly-wide node object for this instance, or nil when
# none exists.
def has_assembly_wide_node?()
  sp_hash = {
    :cols   => [:id, :display_name, :group_id, :ordered_component_ids],
    :filter => [:and, [:eq, :type, 'assembly_wide'], [:eq, :assembly_id, id()]]
  }
  Model.get_obj(model_handle(:node), sp_hash)
end
-
-
# Returns a node group object if node_idh is a node group member of this assembly instance
def is_node_group_member?(node_idh)
  sp_hash = {
    :cols => [:id, :display_name,:group_id, :node_members],
    :filter => [:eq, :assembly_id, id()]
  }
  node_id = node_idh.get_id()
  # NOTE(review): cols request :node_members (plural) but each row is read
  # via [:node_member] (singular) — confirm the virtual column produces
  # one row per member under that key
  Model.get_objs(model_handle(:node),sp_hash).find{|ng|ng[:node_member].id == node_id}
end
-
1
private :is_node_group_member?
-
-
1
# Clones the given assembly template's nodes and port links into this
# instance's target, assigning them to this assembly; the placeholder
# object produced by the clone is deleted. Returns this instance's id handle.
def add_assembly_template(assembly_template)
  target = get_target()
  assign = {:assembly_id => id()}
  # TODO: want to change node names if dups
  override_attrs = {:node => assign.merge(:component_ref => assign), :port_link => assign}
  clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
  new_part = target.clone_into(assembly_template, override_attrs, clone_opts)
  self.class.delete_instance(new_part.id_handle())
  id_handle()
end
-
-
1
# Attaches the named service add-on as a sub-assembly of this instance.
# assembly_name overrides the generated sub-assembly name. Returns the new
# sub-assembly's id handle, or nil when the clone fails. Raises ErrorUsage
# when the add-on is not applicable to this assembly.
def add_service_add_on(add_on_name, assembly_name=nil)
  update_object!(:display_name)

  aug_service_add_on = get_augmented_service_add_on(add_on_name)
  unless aug_service_add_on
    raise ErrorUsage.new("Service add on (#{add_on_name}) is not a possible extension for assembly (#{self[:display_name]})")
  end
  sub_assembly_template = aug_service_add_on[:sub_assembly_template].copy_as_assembly_template()

  override_attrs = {
    :display_name => assembly_name || aug_service_add_on.new_sub_assembly_name(self, sub_assembly_template),
    :assembly_id  => id()
  }
  clone_opts = {
    :ret_new_obj_with_cols => [:id,:type],
    :service_add_on_info => {
      :base_assembly  => self,
      :service_add_on => aug_service_add_on
    }
  }
  target = get_target()
  new_sub_assembly = target.clone_into(sub_assembly_template, override_attrs, clone_opts)
  new_sub_assembly && new_sub_assembly.id_handle()
end
-
-
1
# Creates (or refreshes) an assembly template named template_name inside
# service_module from this instance's nodes. Raises ErrorUsage when the
# instance has no nodes.
def create_or_update_template(service_module,template_name)
  service_module_name = service_module.get_field?(:display_name)
  project = service_module.get_project()
  node_idhs = get_nodes().map { |node| node.id_handle() }
  if node_idhs.empty?
    raise ErrorUsage.new("Cannot find any nodes associated with assembly (#{get_field?(:display_name)})")
  end
  Assembly::Template.create_or_update_from_instance(project,node_idhs,template_name,service_module_name)
end
-
-
1
# Convenience wrapper around set_attributes for a single pattern/value pair.
def set_attribute(attribute,value,opts={})
  pair = {:pattern => attribute, :value => value}
  set_attributes([pair], opts)
end
-
-
1
# Sets multiple attributes from [{:pattern=>..., :value=>...}] pairs.
# Returns the attribute patterns from super, or a Hash flagged :ambiguous
# when a pattern matched both a component and a node attribute.
def set_attributes(av_pairs,opts={})
  attr_patterns = nil
  Transaction do
    # super does the processing that sets the actual attributes then if opts[:update_meta] set
    # then if opts[:update_meta] set meta info can be changed on the assembly module
    attr_patterns = super

    # return if ambiguous attributes (component and node have same name and attribute)
    # NOTE: this return exits the whole method from inside the Transaction block
    return attr_patterns if attr_patterns.is_a?(Hash) && attr_patterns[:ambiguous]

    if opts[:update_meta]
      # only newly-created component-level attributes need their meta persisted
      created_cmp_level_attrs = attr_patterns.select{|r|r.type == :component_level and r.created?()}
      unless created_cmp_level_attrs.empty?
        AssemblyModule::Component::Attribute.update(self,created_cmp_level_attrs)
      end
    end
  end
  attr_patterns
end
-
-
1
# Validates that id names an instantiated (composite, target-bound)
# assembly; delegates the actual check to check_valid_id_helper.
def self.check_valid_id(model_handle,id)
  filter = [:and,
            [:eq, :id, id],
            [:eq, :type, "composite"],
            [:neq, :datacenter_datacenter_id, nil]]
  check_valid_id_helper(model_handle,id,filter)
end
-
-
1
# Resolves an assembly instance name to its id. Accepts either 'assembly'
# or 'target/assembly'; any other shape raises ErrorNameInvalid.
def self.name_to_id(model_handle,name)
  parts = name.split("/")
  augmented_sp_hash =
    case parts.size
    when 1
      {:cols => [:id],
       :filter => [:and,
                   [:eq, :display_name, parts[0]],
                   [:eq, :type, "composite"],
                   [:neq, :datacenter_datacenter_id, nil]]
      }
    when 2
      # disambiguate by target name via post filter
      {:cols => [:id,:component_type,:target],
       :filter => [:and,
                   [:eq, :display_name, parts[1]],
                   [:eq, :type, "composite"]],
       :post_filter => lambda{|r|r[:target][:display_name] == parts[0]}
      }
    else
      raise ErrorNameInvalid.new(name,pp_object_type())
    end
  name_to_id_helper(model_handle,name,augmented_sp_hash)
end
-
-
# TODO: probably move to Assembly
-
1
def model_handle(mn=nil)
-
super(mn||:component)
-
end
-
-
end
-
end
-
# TODO: hack to get around error in lib/model.rb:31:in `const_get
-
1
AssemblyInstance = Assembly::Instance
-
end
-
-
1
module DTK
-
1
class Assembly::Instance
-
1
module Action
-
1
r8_nested_require('action','execute_tests')
-
1
r8_nested_require('action','ssh_access')
-
1
# Action queue that tails a log file on each node via the tail agent.
class GetLog < ActionResultsQueue
  private
  # agent/method pair dispatched by this queue
  def action_hash()
    {:agent => :tail, :method => :get_log}
  end
end
-
1
# Action queue that greps files on each node via the tail agent.
class Grep < ActionResultsQueue
  private
  # agent/method pair dispatched by this queue
  def action_hash()
    {:agent => :tail, :method => :grep}
  end
end
-
-
1
# Action queue that collects process listings from nodes via the ps agent.
class GetPs < ActionResultsQueue
  private
  # agent/method pair dispatched by this queue
  def action_hash()
    {:agent => :ps, :method => :get_ps}
  end

  # Wraps each raw row with node info and packages the set as a Result
  # keyed by the node's display name.
  def process_data!(data,node_info)
    Result.new(node_info[:display_name],data.map{|r|node_info.merge(r)})
  end
end
-
-
1
# Action queue that runs an arbitrary command on nodes via the action agent.
class ActionAgent < ActionResultsQueue
  # NOTE(review): sibling queue classes mark action_hash private; confirm
  # whether it is intentionally public here
  def action_hash()
    { :agent => :action_agent, :method => :run_command }
  end
end
-
-
1
# Action queue that collects TCP/UDP socket info from nodes via the
# netstat agent.
class GetNetstats < ActionResultsQueue
  private
  # agent/method pair dispatched by this queue
  def action_hash()
    {:agent => :netstat, :method => :get_tcp_udp}
  end

  # Reduces raw netstat rows to unique {port, local_address, protocol}
  # entries: keeps listening TCP sockets and all UDP sockets, parses the
  # 'address:port' local endpoint, and accepts IPv4 dotted quads or any
  # address containing '::' (IPv6).
  def process_data!(data,node_info)
    ndx_ret = Hash.new
    data.each do |r|
      next unless r[:state] == "LISTEN" || r[:protocol] == "udp"
      if r[:local] =~ /(^.+):([0-9]+$)/
        address = $1
        port = $2.to_i
        next unless address =~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$|::/
        # first occurrence of an address/port pair wins
        ndx_ret["#{address}_#{port}"] ||= {
          :port => port,
          :local_address => address,
          :protocol => r[:protocol]
        }
      end
    end
    Result.new(node_info[:display_name],ndx_ret.values)
  end
end
-
end
-
end
-
end
-
# -*- coding: utf-8 -*-
-
1
module DTK
-
1
class Assembly::Instance
-
1
module Action
-
1
class ExecuteTests < ActionResultsQueue
-
1
attr_reader :error
-
1
# Sets up an execute-tests queue from params: the agent action, owning
# project, assembly instance, target nodes and an optional component filter.
def initialize(params)
  super()
  @agent_action      = params[:agent_action]
  @project           = params[:project]
  @assembly_instance = params[:assembly_instance]
  @nodes             = params[:nodes]
  @type              = :assembly
  @filter            = params[:component]
  @error             = nil
end
-
-
1
# Builds the per-node serverspec test payload and dispatches the
# execute_tests_v2 agent action; per-node Result objects are pushed onto
# the queue as agent responses arrive.
# Records @error and raises DTK::ErrorUsage when no test components are linked.
def initiate()
  test_cmps = get_test_components_with_bindings()
  if test_cmps.empty?
    @error = "Unable to execute tests. There are no links to test components!"
    raise ::DTK::ErrorUsage
  end

  # Recognize if nodes are part of node group and map test components to nodes appropriately;
  # node-group member names look like '...::<base>:<index>'
  node_names = @nodes.map { |n| { :name => n[:display_name], :id => n[:id] } }
  test_components = []
  test_cmps.each do |tc|
    node_names.each do |node|
      if node[:name].split("::").last.split(":").first == tc[:node_name]
        out = tc.dup
        out[:node_name] = node[:name]
        out[:node_id] = node[:id]
        test_components << out
      else
        test_components << tc
      end
    end
  end
  test_components.uniq!

  # keep only test components bound to one of this instance's nodes
  test_components.select! { |tc| node_names.map { |n| n[:name] }.include? tc[:node_name] }

  ndx_version_contexts = get_version_contexts(test_components).inject(Hash.new) { |h, vc| h.merge(vc[:id] => vc) }
  version_contexts = ndx_version_contexts.values

  test_instances = test_components.map do |test_cmp|
    unless version_context = ndx_version_contexts[test_cmp[:implementation_id]]
      # BUGFIX: key was misspelled :dispaly_name, so the message always interpolated nil
      raise Error.new("Cannot find version context for #{test_cmp[:display_name]}")
    end
    attrib_array = test_cmp[:attributes].map { |a| { a[:display_name].to_sym => a[:attribute_value] } }
    test_name = (test_cmp[:external_ref] || {})[:test_name]
    {
      :module_name => version_context[:implementation],
      :component => "#{test_cmp[:node_name]}/#{test_cmp[:component_name]}",
      :test_component => test_cmp[:display_name],
      :test_name => test_name,
      :params => attrib_array
    }
  end

  node_ids_with_tests = test_components.inject(Hash.new) { |h, tc| h.merge(tc[:node_id] => true) }.keys
  ndx_pbuilderid_to_node_info = nodes.inject(Hash.new) do |h, n|
    h.merge(n.pbuilderid => { :id => n[:id].to_s, :display_name => n[:display_name] })
  end

  # filter nodes with tests
  nodes.select! { |node| node_ids_with_tests.include? node[:id] }

  # part of the code used to decide which components belong to which nodes;
  # serverspec tests are then triggered on a node only for components that
  # actually belong to that specific node
  node_hash = {}
  unless test_instances.empty?
    nodes.each do |node|
      components_array = test_instances.select { |comp| comp[:component].include? node[:display_name] }
      node_hash[node[:id]] = { :components => components_array, :instance_id => node[:external_ref][:instance_id], :version_context => version_contexts }
    end
  end

  # we send elements that are going to be used; due to bad design we need to
  # send an array even if queue logic is only using the size of that array
  set_indexes!(node_hash.keys)

  callbacks = {
    :on_msg_received => proc do |msg|
      response = CommandAndControl.parse_response__execute_action(nodes, msg)
      if response and response[:pbuilderid] and response[:status] == :ok
        node_info = ndx_pbuilderid_to_node_info[response[:pbuilderid]]
        raw_data = response[:data].map { |r| node_info.merge(r) }
        # TODO: find better place to put this
        raw_data.each do |r|
          r[:component_name].gsub!(/__/, '::') if r[:component_name]
          r[:test_component_name].gsub!(/__/, '::') if r[:test_component_name]
        end
        # filter out empty responses, which cause an error on the client side
        # BUGFIX: the nil check must come before empty? (previous order could
        # call empty? on nil)
        unless response[:data].nil? or response[:data].empty?
          packaged_data = DTK::ActionResultsQueue::Result.new(node_info[:display_name], raw_data)
          push(node_info[:id], (type == :node) ? packaged_data.data : packaged_data)
        end
      end
    end
  }

  Log.info_pp(:execute_tests_v2 => node_hash)
  CommandAndControl.request__execute_action_per_node(:execute_tests_v2, :execute_tests_v2, node_hash, callbacks)
end
-
1
private
-
1
attr_reader :project,:assembly_instance, :nodes, :action_results_queue, :type, :filter
-
# returns array of augmented (test) components where augmented data
# {:attributes => ARRAY[attribute objs],
#  :node_name => STRING #node associated with base component name
#  :node_id => ID
#  :component_name => STRING #base component name
#  :component_id => ID}
# there can be multiple entries for same test component for each base component instance
def get_test_components_with_bindings()
  ret = Array.new
  test_cmp_attrs = get_test_component_attributes()
  return ret if test_cmp_attrs.empty?

  # for each binding return at top level the matching test component with
  # attributes substituted with binding values and augmented columns
  # :node_name and :component_name
  test_cmp_attrs.each do |r|
    if test_cmp = r[:test_component]
      ret << dup_and_substitute_attribute_values(test_cmp,r)
    else
      # BUGFIX: previously interpolated the undefined local
      # 'test_component_name', which raised NameError on this path;
      # log the base component the dangling link hangs off instead
      Log.error("Dangling reference to test component linked to component (#{r[:component_name]})")
    end
  end
  ret
end
-
-
1
# Shallow-dups test_cmp, copies the binding columns from attr_info, and
# substitutes each test attribute's value with the bound component
# attribute value (when a binding exists).
def dup_and_substitute_attribute_values(test_cmp,attr_info)
  result = test_cmp.shallow_dup(:display_name,:component_type,:external_ref)
  result.merge!(Aux.hash_subset(attr_info,[:component_name,:component_id,:node_name,:node_id]))
  result[:attributes] = test_cmp[:attributes].map do |attr|
    attr_copy = attr.shallow_dup(:display_name)
    attr_name = attr_copy[:display_name]
    bound = attr_info[:attributes].find { |a| a[:related_test_attribute] == attr_name }
    attr_copy[:attribute_value] = bound[:component_attribute_value] if bound
    attr_copy
  end
  result
end
-
-
# returns array having test components that are linked to a component in assembly_instance
-
# each element has form
-
# {:test_component=>Cmp Obj
-
# :component_name=>String,
-
# :node_name=>String,
-
# :attributes=>[{:component_attribute_name=>String, :component_attribute_value=>String,:related_test_attribute=>String}
-
1
def get_test_component_attributes()
-
ret = Array.new
-
linked_tests = Component::Test.get_linked_tests(assembly_instance, @project, @filter)
-
if linked_tests.empty?
-
return ret
-
end
-
-
attr_mh = assembly_instance.model_handle(:attribute)
-
all_test_params = []
-
-
linked_tests.each do |t|
-
node = t.node
-
component = t.component
-
component_id = component.id
-
linked_test_array = t.find_relevant_linked_test_array()
-
-
linked_test_array.each do |linked_test|
-
var_mappings_hash = linked_test.var_mappings_hash
-
k, v = var_mappings_hash.first
-
related_test_attribute = v.map { |x| x.split(".").last }
-
attribute_names = k.map { |x| x.split(".").last }
-
test_component = linked_test.test_component
-
# TODO: more efficient to get in bulk outside of test_params loop
-
sp_hash = {
-
:cols => [:display_name, :attribute_value],
-
:filter => [:and,
-
[:eq, :component_component_id,component_id],
-
[:oneof, :display_name, attribute_names]]
-
}
-
ndx_attr_vals = Model.get_objs(attr_mh,sp_hash).inject(Hash.new) do |h,a|
-
h.merge(a[:display_name] => a[:attribute_value])
-
end
-
attributes = Array.new
-
-
attribute_names.each_with_index do |attribute_name, idx|
-
if val = ndx_attr_vals[attribute_name]
-
attributes << {
-
:component_attribute_name => attribute_name,
-
:component_attribute_value => val,
-
:related_test_attribute => related_test_attribute[idx]
-
}
-
end
-
end
-
hash = {
-
:test_component => test_component,
-
:attributes => attributes,
-
:component_id => component_id,
-
:component_name => component[:display_name],
-
:node_id => node[:id],
-
:node_name => node[:display_name]
-
}
-
all_test_params << hash
-
end
-
end
-
all_test_params
-
end
-
-
1
# Fetches version-context info for the given test components; logs an
# error and returns nil when the list is unexpectedly empty.
def get_version_contexts(test_components)
  if test_components.empty?
    Log.error("Unexpected that test_components is empty")
    return nil
  end
  TestModule::VersionContextInfo.get_in_hash_form(test_components,@assembly_instance)
end
-
-
#TODO: deprecate
#TODO: rather than passing in strings, have controller/helper methods convert to ids and objects, rather than passing
# Returns component templates on the given nodes, each augmented with
# :node_node_id and :component_instance_id, optionally narrowed by
# 'node/component' name pairs in +components+.
def get_augmented_component_templates(nodes,components)
  ret = Array.new
  if nodes.empty?
    return ret
  end

  sp_hash = {
    :cols => [:id,:group_id,:instance_component_template_parent,:node_node_id],
    :filter => [:oneof,:node_node_id,nodes.map{|n|n.id()}]
  }
  ret = Model.get_objs(nodes.first.model_handle(:component),sp_hash).map do |r|
    r[:component_template].merge(:node_node_id => r[:node_node_id], :component_instance_id => r[:id])
  end
  # NOTE(review): 'components.include? "/"' tests whether the ARRAY contains
  # the literal string "/" (element equality), not whether any element
  # contains a slash — confirm intent before changing; left as-is since this
  # method is slated for deprecation
  if components.nil? or components.empty? or !components.include? "/"
    return ret
  end

  # parse 'node/component' (or bare component) selectors
  cmp_node_names = components.map do |name_pairs|
    if name_pairs.include? "/"
      split = name_pairs.split('/')
      if split.size == 2
        {:node_name => split[0],:component_name => Component.display_name_from_user_friendly_name(split[1])}
      else
        Log.error("unexpected component form: #{name_pairs}; skipping")
        nil
      end
    else
      {:component_name => Component.display_name_from_user_friendly_name(name_pairs)}
    end
  end.compact
  ndx_node_names = nodes.inject(Hash.new){|h,n|h.merge(n[:id] => n[:display_name])}

  #only keep matching ones
  ret.select do |cmp_template|
    cmp_node_names.find do |r|
      r[:node_name] == ndx_node_names[cmp_template[:node_node_id]] and r[:component_name] == cmp_template[:display_name]
    end
  end
end
-
end
-
end
-
end
-
end
-
-
1
module DTK
-
1
class Assembly::Instance
-
1
module Action
-
1
class SSHAccess < ActionResultsQueue
-
1
# Builds an SSH-access queue; opts[:agent_action] selects grant or revoke.
def initialize(opts={})
  super()
  @agent_action = opts[:agent_action]
end
-
1
# agent/method pair dispatched by this queue; method varies with the
# configured agent action (e.g. grant or revoke access)
def action_hash()
  {:agent => :ssh_agent, :method => @agent_action}
end
-
#TODO: write this in terms of its parent ActionResultsQueue#initiate or have parent method in terms of reusable fragments
# Dispatches the ssh-agent action (grant or revoke a public key) to the
# given nodes; on each successful agent reply it mirrors the change into
# the model via an interpreted :authorized_ssh_public_key component and
# pushes the agent's response data onto the queue.
# params is expected to carry :system_user, :rsa_pub_name and :rsa_pub_key.
def initiate(nodes,params,opts={})
  indexes = nodes.map{|r|r[:id]}
  set_indexes!(indexes)
  # index node info by pbuilderid so agent replies can be matched back to nodes
  ndx_pbuilderid_to_node_info = nodes.inject(Hash.new) do |h,n|
    h.merge(n.pbuilderid => {:id => n[:id], :display_name => n.assembly_node_print_form()})
  end
  callbacks = {
    :on_msg_received => proc do |msg|

      response = CommandAndControl.parse_response__execute_action(nodes,msg)
      if response and response[:pbuilderid] and response[:status] == :ok
        node_info = ndx_pbuilderid_to_node_info[response[:pbuilderid]]

        unless response[:data][:error]
          component_type = :authorized_ssh_public_key
          attr_hash = {
            :linux_user => params[:system_user],
            :key_name => params[:rsa_pub_name],
            :key_content => params[:rsa_pub_key]
          }
          node = nodes.find { |n| n[:id] == node_info[:id] }

          # keep the model in sync with what the agent did on the box
          if (@agent_action == :grant_access)
            Component::Instance::Interpreted.create_or_update?(node,component_type,attr_hash)
          else
            Component::Instance::Interpreted.delete(node, component_type, attr_hash)
          end
        end

        push(node_info[:display_name],response[:data])
      else
        Log.error("Agent '#{msg[:senderagent]}' error, Code: #{msg[:body][:statuscode]} - #{msg[:body][:statusmsg]}")
      end

    end
  }
  CommandAndControl.request__execute_action(:ssh_agent,@agent_action,nodes,callbacks,params)
end
-
end
-
end
-
end
-
end
-
-
2
module DTK; class Assembly
-
1
class Instance
-
1
module DeleteClassMixin
-
1
# Deletes the given assembly instance(s): first their contents (nodes,
# modules, task templates), then the assembly rows themselves. Accepts a
# single id handle or an array; workspaces can never be deleted.
def delete(assembly_idhs,opts={})
  if assembly_idhs.kind_of?(Array)
    return if assembly_idhs.empty?
  else
    assembly_idhs = [assembly_idhs]
  end
  # cannot delete workspaces
  # (was 'if workspace = ...find{...}' with the binding never used)
  if assembly_idhs.any?{|idh|Workspace.is_workspace?(idh.create_object())}
    raise ErrorUsage.new("Cannot delete a workspace")
  end
  Delete.contents(assembly_idhs,opts)
  delete_instances(assembly_idhs)
end
-
-
1
# Deletes the contents (nodes, modules, task templates) of the given
# assemblies without deleting the assembly rows themselves.
def delete_contents(assembly_idhs,opts={})
  Delete.contents(assembly_idhs,opts)
end
-
end
-
-
1
module DeleteMixin
-
1
# Destroys and resets every node of this instance within its target.
# Raises ErrorUsage when the instance contains node groups (unsupported).
def destroy_and_reset_nodes()
  nodes = Delete.get_nodes_simple(model_handle(:node),[id()])
  # TODO: DTK-1857
  if nodes.any? { |n| n.is_node_group?() }
    raise ErrorUsage.new("destroy_and_reset_nodes not supported for service instances with node groups")
  end
  target_idh = get_target.id_handle()
  nodes.map { |n| n.destroy_and_reset(target_idh) }
end
-
-
1
# Deletes a single node from this assembly instance, updating the task
# template. Node groups must be deleted via delete_node_group instead.
def delete_node(node_idh,opts={})
  node = node_idh.create_object()
  # TODO: check if cleaning up dangling links when assembly node deleted
  if node.is_node_group?
    node.update_object!(:display_name)
    raise ErrorUsage.new("Node with name '#{node[:display_name]}' does not exist. If you want to delete node group you can use 'delete-node-group node-group-name'")
  end

  if node_group = is_node_group_member?(node_idh)
    # if node-group member and last one then delete node group as well
    node_group = node_group.create_obj_optional_subclass()
    Delete.node(node,opts.merge(:update_task_template=>true,:assembly=>self))
    node_group.delete_object({:update_task_template=>true, :assembly=>self}) if node_group.get_node_group_members.size == 0
  else
    Delete.node(node,opts.merge(:update_task_template=>true,:assembly=>self))
  end
end
-
-
1
# Deletes a node group from this assembly instance: first removes all of
# its members, then the group itself (updating the task template).
# Raises ErrorUsage when the handle does not refer to a node group.
def delete_node_group(node_group_idh)
  node_group = node_group_idh.create_object()

  unless node_group.is_node_group?
    node_group.update_object!(:display_name)
    raise ErrorUsage.new("Node group with name '#{node_group[:display_name]}' does not exist")
  end

  node_group = node_group.create_obj_optional_subclass()
  # shrink membership to zero before removing the group object itself
  node_group.delete_group_members(0)
  node_group.delete_object({:update_task_template=>true, :assembly=>self})
end
-
-
1
# Deletes a component from this assembly instance, fixing up dangling
# attribute links and the workflow task template. When node_id is given
# (as an integer) the component is additionally validated against, and
# scoped to, that node. Raises ErrorIdInvalid when the node or component
# does not belong to this instance.
def delete_component(component_idh, node_id=nil)
  component_filter = [:and, [:eq, :id, component_idh.get_id()], [:eq, :assembly_id, id()]]
  node = nil
  # first check that node belongs to this assembly
  # BUGFIX: was kind_of?(Fixnum); Fixnum was deprecated in Ruby 2.4 and
  # removed in 3.2 — Integer matches the same values on all versions
  if node_id.kind_of?(Integer)
    sp_hash = {
      :cols => [:id, :display_name,:group_id],
      :filter => [:and, [:eq, :id, node_id], [:eq, :assembly_id, id()]]
    }

    unless node = Model.get_obj(model_handle(:node),sp_hash)
      raise ErrorIdInvalid.new(node_id,:node)
    end
    component_filter << [:eq, :node_node_id, node_id]
  end

  # also check that component_idh belongs to this instance and to this node
  sp_hash = {
    #:only_one_per_node,:ref are put in for info needed when getting title
    :cols => [:id, :display_name, :node_node_id,:only_one_per_node,:ref],
    :filter => component_filter
  }
  component = Component::Instance.get_obj(model_handle(:component),sp_hash)
  unless component
    raise ErrorIdInvalid.new(component_idh.get_id(),:component)
  end
  node ||= component_idh.createIDH(:model_name => :node,:id => component[:node_node_id]).create_object()
  ret = nil
  # link cleanup, workflow update, and row deletion are atomic
  Transaction do
    node.update_dangling_links(:component_idhs => [component.id_handle()])
    Task::Template::ConfigComponents.update_when_deleted_component?(self,node,component)
    ret = Model.delete_instance(component_idh)
  end
  ret
end
-
end
-
-
1
class Delete < self
-
1
# Deletes the contents of the given assemblies: sub-assemblies first
# (recursively), then per-assembly modules, nodes and task templates.
def Delete.contents(assembly_idhs,opts={})
  return if assembly_idhs.empty?
  delete(get_sub_assemblies(assembly_idhs).map { |sub| sub.id_handle() })
  assembly_ids = assembly_idhs.map { |idh| idh.get_id() }
  sample_idh = assembly_idhs.first
  Delete.assembly_modules?(assembly_idhs,opts)
  # Delete.assembly_modules? needs to be done before Delete.assembly_nodes
  Delete.assembly_nodes(sample_idh.createMH(:node),assembly_ids,opts)
  Delete.task_templates(sample_idh.createMH(:task_template),assembly_ids)
end
-
-
1
# Returns node objects (as their optional subclasses) for the given
# assembly ids.
def self.get_nodes_simple(node_mh,assembly_ids)
  assembly_idhs = assembly_ids.map do |assembly_id|
    node_mh.createIDH(:id => assembly_id, :model_name => :assembly_instance)
  end
  Assembly::Instance.get_nodes_simple(assembly_idhs, :ret_subclasses => true)
end
-
-
1
private
-
1
# Deletes the task templates owned by the given assemblies.
def Delete.task_templates(task_template_mh,assembly_ids)
  sp_hash = {
    :cols   => [:id,:display_name],
    :filter => [:oneof,:component_component_id,assembly_ids]
  }
  idhs = get_objs(task_template_mh,sp_hash).map { |tt| tt.id_handle() }
  delete_instances(idhs)
end
-
-
1
# Deletes per-assembly module branches for each given assembly, if present.
def Delete.assembly_modules?(assembly_idhs,opts={})
  assembly_idhs.each do |idh|
    AssemblyModule.delete_modules?(create_from_id_handle(idh),opts)
  end
end
-
-
# This only deletes the nodes that the assembly 'owns'; with sub-assemblies, the assembly base will own the node
def Delete.assembly_nodes(node_mh,assembly_ids,opts={})
  Delete.nodes(node_mh,assembly_ids,opts)
end
-
-
1
# Deletes (or destroys, per opts) every node owned by the given assemblies.
def Delete.nodes(node_mh,assembly_ids,opts={})
  get_nodes_simple(node_mh,assembly_ids).map { |node| Delete.node(node,opts) }
end
-
-
# TODO: double check if Transaction needed; if so look at whether for same reason put in destroy and reset
# Deletes one node inside a transaction; with opts[:destroy_nodes] the
# backing instance is destroyed as well.
def Delete.node(node,opts={})
  ret = nil
  Transaction do
    ret =
      if opts[:destroy_nodes]
        node.destroy_and_delete(opts)
      else
        node.delete_object(opts)
      end
  end
  ret
end
-
end
-
end
-
end; end
-
3
module DTK; class Assembly; class Instance
-
1
module Get
-
1
r8_nested_require('get','attribute')
-
end
-
1
module GetMixin
-
1
include Get::AttributeMixin
-
-
1
# Scopes queries to the assembly_instance model handle.
def get_objs(sp_hash,opts={})
  super(sp_hash,opts.merge(:model_handle => model_handle().createMH(:assembly_instance)))
end
-
-
# Returns the assembly template this instance was created from
# (wrapped as a Template object).
def get_parent()
  Template.create_from_component(get_obj_helper(:instance_parent,:assembly_template))
end
-
-
1
# Returns the target this assembly instance is deployed into.
def get_target()
  get_obj_helper(:target,:target)
end
-
-
1
# Returns the id handle of this instance's parent (its target),
# carrying auth info.
def get_target_idh()
  id_handle().get_parent_id_handle_with_auth_info()
end
-
-
-
#### get methods around attribute mappings
-
1
# Returns augmented attribute-link (attribute mapping) objects for this
# assembly: links attached to any of its port links, or directly to the
# assembly itself.
def get_augmented_attribute_mappings()
  # TODO: once field assembly_id is always populated on attribute.link, can do simpler query
  # (removed an unused 'ret = Array.new' initialization)
  sp_hash = {
    :cols => [:id,:group_id],
    :filter => [:eq,:assembly_id,id()]
  }
  port_links = Model.get_objs(model_handle(:port_link),sp_hash)
  filter = [:or,[:oneof,:port_link_id,port_links.map{|r|r.id()}],[:eq,:assembly_id,id()]]
  AttributeLink.get_augmented(model_handle(:attribute_link),filter)
end
-
#### end: get methods around attribute mappings
-
-
#### get methods around components
-
1
def get_component_info_for_action_list(opts={})
-
get_field?(:display_name)
-
assembly_source = {:type => "assembly", :object => hash_subset(:id,:display_name)}
-
rows = get_objs_helper(:instance_component_list,:nested_component,opts.merge(:augmented => true))
-
Component::Instance.add_title_fields?(rows)
-
Component::Instance.add_action_defs!(rows,:cols=>[:method_name])
-
ret = opts[:add_on_to]||opts[:seed]||Array.new
-
rows.each{|r|ret << r.merge(:source => assembly_source)}
-
ret
-
end
-
-
1
def get_peer_component_instances(cmp_instance)
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:component_type],
-
:filter => [:and,[:eq,:ancestor_id,cmp_instance.get_field?(:ancestor_id)],
-
[:eq,:assembly_id,id()],
-
[:neq,:id,cmp_instance.id()]]
-
}
-
Component::Instance.get_objs(model_handle(:component_instance),sp_hash)
-
end
-
-
1
def get_component_instances(opts={})
-
sp_hash = {
-
:cols => opts[:cols] || [:id,:group_id,:display_name,:component_type],
-
:filter => [:eq,:assembly_id,id()]
-
}
-
Component::Instance.get_objs(model_handle(:component_instance),sp_hash)
-
end
-
-
-
1
def get_augmented_components(opts=Opts.new)
-
ret = Array.new
-
rows = get_objs(:cols => [:instance_nodes_and_cmps_summary_with_namespace])
-
-
if opts[:filter_proc]
-
rows.reject!{|r|!opts[:filter_proc].call(r)}
-
elsif opts[:filter_component] != ""
-
opts[:filter_component].sub!(/::/, "__")
-
rows.reject!{|r| r[:nested_component][:display_name] != opts[:filter_component] }
-
end
-
-
return ret if rows.empty?
-
-
components = Array.new
-
rows.each do |r|
-
if cmp = r[:nested_component]
-
# add node and namespace hash information to component hash
-
components << cmp.merge(r.hash_subset(:node))#.merge!(r.hash_subset(:namespace)))
-
end
-
end
-
-
if opts.array(:detail_to_include).include?(:component_dependencies)
-
Dependency::All.augment_component_instances!(self,components, Opts.new(:ret_statisfied_by => true))
-
end
-
components
-
end
-
#### end: get methods around components
-
-
#### get methods around component modules
-
1
def get_component_modules(opts={})
-
AssemblyModule::Component.get_for_assembly(self,opts)
-
end
-
#### end: get methods around component modules
-
-
#### get methods around nodes
-
1
def get_leaf_nodes(opts={})
-
get_nodes__expand_node_groups(opts.merge(:remove_node_groups=>true))
-
end
-
-
1
def get_nodes__expand_node_groups(opts={})
-
cols = opts[:cols]||Node.common_columns()
-
node_or_ngs = get_nodes(*cols)
-
ServiceNodeGroup.expand_with_node_group_members?(node_or_ngs,opts)
-
end
-
-
1
def get_node_groups(opts={})
-
cols = opts[:cols]||Node.common_columns()
-
node_or_ngs = get_nodes(*cols)
-
ServiceNodeGroup.get_node_groups?(node_or_ngs)
-
end
-
-
1
def get_node?(filter)
-
sp_hash = {
-
:cols => [:id,:display_name],
-
:filter => [:and,[:eq, :assembly_id, id()],filter]
-
}
-
rows = Model.get_objs(model_handle(:node),sp_hash)
-
if rows.size > 1
-
Log.error("Unexpected that more than one row returned for filter (#{filter.inspect})")
-
return nil
-
end
-
rows.first
-
end
-
-
# TODO: rename to reflect that not including node group members, just node groups themselves and top level nodes
-
# This is equivalent to saying that this does not return target_refs
-
1
def get_nodes(*alt_cols)
-
self.class.get_nodes([id_handle],*alt_cols)
-
end
-
#### end: get methods around nodes
-
-
#### end: get methods around ports
-
# augmented with node, :component and link def info
-
1
def get_augmented_ports(opts={})
-
ndx_ret = Hash.new
-
ret = get_objs(:cols => [:augmented_ports]).map do |r|
-
link_def = r[:link_def]
-
if link_def.nil? or (link_def[:link_type] == r[:port].link_def_name())
-
if get_augmented_ports__matches_on_title?(r[:nested_component],r[:port])
-
r[:port].merge(r.slice(:node,:nested_component,:link_def))
-
end
-
end
-
end.compact
-
if opts[:mark_unconnected]
-
get_augmented_ports__mark_unconnected!(ret,opts)
-
end
-
ret
-
end
-
-
# TODO: more efficient if can do the 'title' match on sql side
-
1
def get_augmented_ports__matches_on_title?(component,port)
-
ret = true
-
if cmp_title = ComponentTitle.title?(component)
-
ret = (cmp_title == port.title?())
-
end
-
ret
-
end
-
1
private :get_augmented_ports__matches_on_title?
-
-
# TODO: there is a field on ports :connected, but it is not correctly updated so need to get ports links to find out what is connected
-
1
def get_augmented_ports__mark_unconnected!(aug_ports,opts={})
-
port_links = get_port_links()
-
connected_ports = port_links.map{|r|[r[:input_id],r[:output_id]]}.flatten.uniq
-
aug_ports.each do |r|
-
if r[:direction] == "input"
-
r[:unconnected] = !connected_ports.include?(r[:id])
-
end
-
end
-
end
-
1
private :get_augmented_ports__mark_unconnected!
-
#### end: get methods around ports
-
-
#### get methods around service add ons
-
1
def get_service_add_ons()
-
get_objs_helper(:service_add_ons_from_instance,:service_add_on)
-
end
-
-
1
def get_augmented_service_add_ons()
-
get_objs_helper(:aug_service_add_ons_from_instance,:service_add_on,:augmented => true)
-
end
-
1
def get_augmented_service_add_on(add_on_name)
-
filter_proc = lambda{|sao|sao[:service_add_on][:display_name] == add_on_name}
-
get_obj_helper(:aug_service_add_ons_from_instance,:service_add_on,:filter_proc => filter_proc, :augmented => true)
-
end
-
-
#### end: get methods around service add ons
-
1
def get_tasks(opts={})
-
rows = get_objs(:cols => [:tasks])
-
if opts[:filter_proc]
-
rows.reject!{|r|!opts[:filter_proc].call(r)}
-
end
-
rows.map{|r|r[:task]}
-
end
-
-
#### get methods around task templates
-
1
def get_task_templates(opts={})
-
sp_hash = {
-
:cols => Task::Template.common_columns(),
-
:filter => [:eq,:component_component_id,id()]
-
}
-
Model.get_objs(model_handle(:task_template),sp_hash)
-
end
-
-
1
def get_task_template(task_action=nil,opts={})
-
task_action ||= Task::Template.default_task_action()
-
sp_hash = {
-
:cols => opts[:cols]||Task::Template.common_columns(),
-
:filter => [:and,[:eq,:component_component_id,id()],
-
[:eq,:task_action,task_action]]
-
}
-
Model.get_obj(model_handle(:task_template),sp_hash)
-
end
-
-
1
def get_task_template_serialized_content(task_action=nil,opts={})
-
action_types = [:assembly] # TODO: action_types can be set to [:assembly,:node_centric] if treating inventory node groups
-
opts_task_gen = {:serialized_form => true}.merge(opts)
-
opts_task_gen.merge!(:task_action => task_action) if task_action
-
-
ret = Task::Template::ConfigComponents.get_or_generate_template_content(action_types,self,opts_task_gen)
-
ret && ret.serialization_form(opts[:serialization_form]||{})
-
end
-
-
1
def get_task_templates_with_serialized_content()
-
ret = Array.new
-
-
opts = {
-
:component_type_filter => :service,
-
:serialization_form => {:filter => {:source => :assembly}, :allow_empty_task=>true}
-
}
-
-
# TODO: only returning now the task templates for the default (assembly create action)
-
# this is done by setting task action as nil
-
task_action = nil
-
if serialized_content = get_task_template_serialized_content(task_action,opts)
-
action_task_template = get_task_template(task_action,:cols => [:id,:group_id,:task_action])
-
action_task_template ||= Assembly::Instance.create_stub(model_handle(:task_template))
-
ret << action_task_template.merge(:content => serialized_content)
-
end
-
ret
-
end
-
-
1
def get_parents_task_template(task_action=nil)
-
task_action ||= Task::Template.default_task_action()
-
get_objs_helper(:parents_task_templates,:task_template).select{|r|r[:task_action]==task_action}.first
-
end
-
#### end: get methods around task templates
-
-
1
def get_sub_assemblies()
-
self.class.get_sub_assemblies([id_handle()])
-
end
-
-
end
-
-
1
module GetClassMixin
-
1
def get_objs(mh,sp_hash,opts={})
-
if mh[:model_name] == :assembly_instance
-
get_these_objs(mh,sp_hash,opts)
-
else
-
super
-
end
-
end
-
-
1
def get(assembly_mh, opts={})
-
target_idhs = (opts[:target_idh] ? [opts[:target_idh]] : opts[:target_idhs])
-
target_filter = (target_idhs ? [:oneof, :datacenter_datacenter_id, target_idhs.map{|idh|idh.get_id()}] : [:neq, :datacenter_datacenter_id, nil])
-
filter = [:and, [:eq, :type, "composite"], target_filter,opts[:filter]].compact
-
sp_hash = {
-
:cols => opts[:cols]||[:id,:group_id,:display_name],
-
:filter => filter
-
}
-
get_these_objs(assembly_mh,sp_hash,:keep_ref_cols=>true) #:keep_ref_cols=>true just in case ref col
-
end
-
-
1
def get_info__flat_list(assembly_mh, opts={})
-
target_idh = opts[:target_idh]
-
target_filter = (target_idh ? [:eq, :datacenter_datacenter_id, target_idh.get_id()] : [:neq, :datacenter_datacenter_id, nil])
-
filter = [:and, [:eq, :type, "composite"], target_filter,opts[:filter]].compact
-
col,needs_empty_nodes = list_virtual_column?(opts[:detail_level])
-
cols = [:id,:ref,:display_name,:group_id,:component_type,:version,:created_at,col].compact
-
ret = get(assembly_mh,{:cols => cols}.merge(opts))
-
return ret unless needs_empty_nodes
-
-
# add in in assembly nodes without components on them
-
nodes_ids = ret.map{|r|(r[:node]||{})[:id]}.compact
-
sp_hash = {
-
:cols => [:id, :display_name,:component_type,:version,:instance_nodes_and_assembly_template],
-
:filter => filter
-
}
-
assembly_empty_nodes = get_objs(assembly_mh,sp_hash).reject{|r|nodes_ids.include?((r[:node]||{})[:id])}
-
ret + assembly_empty_nodes
-
end
-
-
1
def get_workspace_object(assembly_mh, opts={})
-
target_idh = opts[:target_idh]
-
target_filter = (target_idh ? [:eq, :datacenter_datacenter_id, target_idh.get_id()] : [:neq, :datacenter_datacenter_id, nil])
-
filter = [:and, [:eq, :type, "composite"],[:eq, :ref, '__workspace'], target_filter,opts[:filter]].compact
-
col,needs_empty_nodes = list_virtual_column?(opts[:detail_level])
-
sp_hash = {
-
:cols => [:id, :display_name,:group_id,:component_type,:version,col].compact,
-
:filter => filter
-
}
-
get_these_objs(assembly_mh,sp_hash)
-
end
-
-
#### get methods around nodes
-
1
def get_nodes(assembly_idhs,*alt_cols)
-
ret = Array.new
-
return ret if assembly_idhs.empty?
-
sp_hash = {
-
:cols => [:id,:group_id,:node_node_id],
-
:filter => [:oneof, :assembly_id, assembly_idhs.map{|idh|idh.get_id()}]
-
}
-
ndx_nodes = Hash.new
-
component_mh = assembly_idhs.first.createMH(:component)
-
get_objs(component_mh,sp_hash).each do |cmp|
-
ndx_nodes[cmp[:node_node_id]] ||= true
-
end
-
-
cols = ([:id,:display_name,:group_id,:type] + alt_cols).uniq
-
sp_hash = {
-
:cols => cols,
-
:filter => [:and, filter_out_target_refs(),
-
[:or,[:oneof, :id, ndx_nodes.keys],
-
#to catch nodes without any components
-
[:oneof, :assembly_id,assembly_idhs.map{|idh|idh.get_id()}]]
-
]
-
}
-
node_mh = assembly_idhs.first.createMH(:node)
-
get_objs(node_mh,sp_hash)
-
end
-
-
# TODO: rename to reflect that not including node group members, just node groups themselves and top level nodes
-
# This is equivalent to saying that this does not return target_refs
-
1
def get_nodes_simple(assembly_idhs,opts={})
-
ret = Array.new
-
return ret if assembly_idhs.empty?()
-
sp_hash = {
-
:cols => opts[:cols] || [:id,:display_name,:group_id,:type,:assembly_id],
-
:filter => [:oneof,:assembly_id,assembly_idhs.map{|idh|idh.get_id()}]
-
}
-
node_mh = assembly_idhs.first.createMH(:node)
-
ret = get_objs(node_mh,sp_hash)
-
unless opts[:ret_subclasses]
-
ret
-
else
-
ret.map do |r|
-
r.is_node_group? ? r.id_handle().create_object(:model_name => :service_node_group).merge(r) : r
-
end
-
end
-
end
-
#### end: get methods around nodes
-
-
1
def get_sub_assemblies(assembly_idhs)
-
ret = Array.new
-
return ret if assembly_idhs.empty?
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name],
-
:filter => [:and,[:oneof,:assembly_id,assembly_idhs.map{|idh|idh.get_id()}],[:eq,:type,"composite"]]
-
}
-
get_objs(assembly_idhs.first.createMH(),sp_hash).map{|a|a.copy_as_assembly_instance()}
-
end
-
-
1
private
-
1
def filter_out_target_refs()
-
@filter_out_target_ref ||= [:and] + Node::TargetRef.types.map{|t|[:neq, :type, t]}
-
end
-
end
-
end; end; end
-
-
4
# Attribute-oriented get methods for Assembly::Instance.
module DTK; class Assembly; class Instance; module Get
  module AttributeMixin
    # Attributes in print form; only the :required_unset_attributes filter is supported.
    def get_attributes_print_form(opts = {})
      if filter = opts[:filter]
        case filter
        when :required_unset_attributes
          opts.merge!(:filter_proc => FilterProc)
        else
          raise Error.new("not treating filter (#{filter}) in Assembly::Instance#get_attributes_print_form")
        end
      end
      get_attributes_print_form_aux(opts)
    end

    # Accepts either an Attribute or a hash wrapping one under :attribute.
    FilterProc = lambda do |r|
      attr =
        if r.kind_of?(Attribute) then r
        elsif r[:attribute] then r[:attribute]
        else raise Error.new("Unexpected form for filtered element (#{r.inspect})")
        end
      attr.required_unset_attribute?()
    end

    # Flat array of assembly-level + component-level + node-level attributes.
    def get_attributes_all_levels()
      assembly_attrs = get_assembly_level_attributes()
      component_attrs = get_augmented_nested_component_attributes()
      node_attrs = get_augmented_node_attributes()
      assembly_attrs + component_attrs + node_attrs
    end

    AttributesAllLevels = Struct.new(:assembly_attrs, :component_attrs, :node_attrs)

    # Like get_attributes_all_levels but keeps each level separate; also drops
    # title attributes of components that can appear more than once per node.
    def get_attributes_all_levels_struct(filter_proc = nil)
      assembly_attrs = get_assembly_level_attributes(filter_proc)
      component_attrs = get_augmented_nested_component_attributes(filter_proc).reject do |attr|
        (not attr[:nested_component].get_field?(:only_one_per_node)) and attr.is_title_attribute?()
      end
      node_attrs = get_augmented_node_attributes(filter_proc)
      AttributesAllLevels.new(assembly_attrs, component_attrs, node_attrs)
    end

    def get_augmented_nested_component_attributes(filter_proc = nil)
      get_objs_helper(:instance_nested_component_attributes, :attribute, :filter_proc => filter_proc, :augmented => true)
    end

    def get_augmented_node_attributes(filter_proc = nil)
      get_objs_helper(:node_attributes, :attribute, :filter_proc => filter_proc, :augmented => true)
    end

    private

    # Builds the merged, name-sorted print form across all attribute levels.
    def get_attributes_print_form_aux(opts = Opts.new)
      filter_proc = opts[:filter_proc]
      all_attrs = get_attributes_all_levels_struct(filter_proc)

      # remove all assembly_wide_node attributes
      all_attrs.node_attrs.reject! { |r| r[:node] && r[:node][:type].eql?('assembly_wide') }

      # (removed a dead duplicate `filter_proc = opts[:filter_proc]` re-assignment here)
      assembly_attrs = all_attrs.assembly_attrs.map do |attr|
        attr.print_form(opts.merge(:level => :assembly))
      end

      opts_attr = opts.merge(:level => :component, :assembly => self)
      component_attrs = Attribute.print_form(all_attrs.component_attrs, opts_attr)

      node_attrs = all_attrs.node_attrs.map do |aug_attr|
        aug_attr.print_form(opts.merge(:level => :node))
      end

      (assembly_attrs + node_attrs + component_attrs).sort { |a, b| a[:display_name] <=> b[:display_name] }
    end
  end
end; end; end; end
-
-
2
# List (print-oriented) methods for assembly instances: class-side listing of
# instances and instance-side listing of attributes/components/nodes/modules/tasks.
module DTK; class Assembly
  class Instance
    module ListClassMixin
      def list_with_workspace(assembly_mh, opts = {})
        get(assembly_mh, opts)
      end

      # Lists assembly instances. Without opts[:detail_level] a pruned summary is
      # returned; otherwise rows are augmented with attributes ('attributes' detail),
      # last task run status and, optionally, template namespaces.
      def list(assembly_mh, opts = {})
        assembly_mh = assembly_mh.createMH(:assembly_instance) # to insure right mh type
        assembly_rows = get_info__flat_list(assembly_mh, opts)
        assembly_rows.reject! { |r| Workspace.is_workspace?(r) } unless opts[:include_workspace]

        if opts[:detail_level].nil?
          Log.error('Unexpected that opts[:include_namespaces] is true') if opts[:include_namespaces]
          list_aux__no_details(assembly_rows)
        else
          want_attrs = [opts[:detail_level]].flatten.include?('attributes')
          attr_rows = want_attrs ? get_default_component_attributes(assembly_mh, assembly_rows) : []
          add_last_task_run_status!(assembly_rows, assembly_mh)

          if opts[:include_namespaces]
            assembly_templates = assembly_rows.map { |a| a[:assembly_template] }.compact
            Template.augment_with_namespaces!(assembly_templates)
          end
          list_aux(assembly_rows, attr_rows, opts)
        end
      end

      def pretty_print_name(assembly, opts = {})
        assembly.get_field?(:display_name)
      end

      def get_last_task_run_status(assembly_rows, assembly_mh)
        add_last_task_run_status!(assembly_rows, assembly_mh)
      end

      private

      def list_aux__no_details(assembly_rows)
        assembly_rows.map { |r| r.prune_with_values(:display_name => pretty_print_name(r)) }
      end

      # Annotates each assembly row (that has a node) with the status of its most
      # recently started task; returns assembly_rows.
      def add_last_task_run_status!(assembly_rows, assembly_mh)
        sp_hash = {
          :cols => [:id, :started_at, :assembly_id, :status],
          :filter => [:oneof, :assembly_id, assembly_rows.map { |r| r[:id] }]
        }
        latest_by_assembly = Hash.new
        get_objs(assembly_mh.createMH(:task), sp_hash).each do |task|
          next unless task[:started_at]
          assembly_id = task[:assembly_id]
          current = latest_by_assembly[assembly_id]
          if current.nil? || task[:started_at] > current[:started_at]
            latest_by_assembly[assembly_id] = task.slice(:status, :started_at)
          end
        end
        assembly_rows.each do |r|
          next unless r[:node]
          if last_status = latest_by_assembly[r[:id]] && latest_by_assembly[r[:id]][:status]
            r[:last_task_run_status] = last_status
          end
        end
        assembly_rows
      end
    end

    module ListMixin
      # Dispatches to the specific list_* method for the requested kind of info.
      def info_about(about, opts = Opts.new)
        case about
        when :attributes
          list_attributes(opts)
        when :components
          list_components(opts)
        when :nodes
          opts.merge!(:cols => Node.common_columns() + [:target])
          list_nodes(opts)
        when :modules
          list_component_modules(opts)
        when :tasks
          list_tasks(opts)
        else
          raise Error.new("TODO: not implemented yet: processing of info_about(#{about})")
        end
      end

      # Attributes in print form; opts[:settings_form] renders the settings hash
      # form, opts[:raw_attribute_value] returns a name=>value hash.
      def list_attributes(opts = Opts.new)
        if opts[:settings_form]
          attrs_all_levels_struct = get_attributes_all_levels_struct(opts[:filter_proc])
          ServiceSetting::AttributeSettings::HashForm.render(attrs_all_levels_struct)
        else
          cols_to_get = (opts[:raw_attribute_value] ? [:display_name, :value] : [:id, :display_name, :value, :linked_to_display_form, :datatype, :name])
          ret = get_attributes_print_form_aux(opts).map do |a|
            Aux::hash_subset(a, cols_to_get)
          end.sort { |a, b| a[:display_name] <=> b[:display_name] }
          opts[:raw_attribute_value] ? ret.inject(Hash.new) { |h, r| h.merge(r[:display_name] => r[:value]) } : ret
        end
      end

      # Component modules used by this assembly; with :version_info detail each
      # local copy is flagged with whether its updates are saved upstream.
      def list_component_modules(opts = Opts.new)
        component_modules_opts = {:recursive => true}
        if get_version_info = opts.array(:detail_to_include).include?(:version_info)
          opts.set_datatype!(:assembly_component_module)
          component_modules_opts.merge!(:get_version_info => true)
        end
        unsorted = get_component_modules(component_modules_opts)
        if get_version_info
          unsorted.each do |r|
            next unless r[:local_copy]
            branch_relationship = r[:branch_relationship] || ''
            local_ahead_or_branchpt = branch_relationship.eql?(:local_ahead) || branch_relationship.eql?(:branchpoint)
            r[:update_saved] = !(r[:local_copy_diff] && local_ahead_or_branchpt)
          end
        end
        unsorted.sort { |a, b| a[:display_name] <=> b[:display_name] }
      end

      # Nodes (including node groups) prepared for display: names, statuses,
      # client-type tags and hide flags.
      def list_nodes(opts = Opts.new)
        opts.merge!(:remove_node_groups => false)
        nodes = get_nodes__expand_node_groups(opts)

        nodes.each do |node|
          set_node_display_name!(node)
          set_node_admin_op_status!(node)
          if external_ref = node[:external_ref]
            external_ref[:dns_name] ||= external_ref[:routable_host_address] # TODO: should be cleaner place to put this
          end
          if target = node[:target]
            target[:iaas_properties][:security_group] ||=
              target[:iaas_properties][:security_group_set].join(',') if target[:iaas_properties][:security_group_set]
          end
          node.sanitize!()

          # dtk-client-type distinguishes node / node-group; nil for a plain node
          is_ng_member = is_node_group_member?(node.id_handle())
          node[:dtk_client_type] = node.is_node_group? ? :node_group : is_ng_member ? :node_group_node : nil

          # hide node groups and assembly-wide nodes from list commands
          node[:dtk_client_hidden] = node.is_node_group? || node[:type].eql?('assembly_wide')

          # hide the assembly-wide node from dtk context
          node[:dtk_context_hidden] = node[:type].eql?('assembly_wide')
        end

        nodes.sort { |a, b| a[:display_name] <=> b[:display_name] }
      end
      private :list_nodes

      def set_node_display_name!(node)
        node[:display_name] = node.assembly_node_print_form()
      end

      def set_node_admin_op_status!(node)
        node[:admin_op_status] = nil if node.is_node_group?()
      end
      private :set_node_display_name!, :set_node_admin_op_status!

      # Components in print form, optionally joined with dependency columns.
      def list_components(opts = Opts.new)
        aug_cmps = get_augmented_components(opts)
        node_cmp_name = opts[:node_cmp_name]

        cmps_print_form = aug_cmps.map do |r|
          type = r[:node][:type]
          namespace = r[:namespace]
          node_name = "#{r[:node][:display_name]}/"
          hide_node_name = node_cmp_name || type.eql?('assembly_wide')
          display_name = "#{hide_node_name ? '' : node_name}#{Component::Instance.print_form(r, namespace)}"
          r.hash_subset(:id).merge({:display_name => display_name})
        end

        sort = proc { |a, b| a[:display_name] <=> b[:display_name] }
        if opts.array(:detail_to_include).include?(:component_dependencies)
          opts.set_datatype!(:component_with_dependencies)
          list_components__with_deps(cmps_print_form, aug_cmps, sort)
        else
          opts.set_datatype!(:component)
          cmps_print_form.sort(&sort)
        end
      end

      def display_name_print_form(opts = {})
        pretty_print_name()
      end

      def list_smoketests()
        Log.error("TODO: needs to be tested")
        nodes_and_cmps = get_info__flat_list(:detail_level => "components")
        nodes_and_cmps.map { |r| r[:nested_component] }.select { |cmp| cmp[:basic_type] == "smoketest" }.map { |cmp| Aux::hash_subset(cmp, [:id, :display_name, :description]) }
      end

      def print_includes()
        ModuleRefs::Tree.create(self).hash_form()
      end

      private

      # Status tables of this assembly's tasks, flattened into one array.
      def list_tasks(opts = {})
        tasks = []
        get_objs(:cols => [:tasks]).each do |row|
          task = row[:task]
          task_mh = task.id_handle().createMH(:task)
          task_structure = Task.get_hierarchical_structure(task_mh.createIDH(:id => task[:id]))
          tasks << task_structure.status_table_form({})
        end
        tasks.flatten
      end

      # Joins the component print form with depends_on / satisfied_by columns.
      def list_components__with_deps(cmps_print_form, aug_cmps, main_table_sort)
        ndx_component_print_form = ret_ndx_component_print_form(aug_cmps, cmps_print_form)
        join_columns = OutputTable::JoinColumns.new(aug_cmps) do |aug_cmp|
          if deps = aug_cmp[:dependencies]
            ndx_els = Hash.new
            deps.each do |dep|
              if depends_on = dep.depends_on_print_form?()
                el = ndx_els[depends_on] ||= Array.new
                sb_cmp_ids = dep.satisfied_by_component_ids
                ndx_els[depends_on] += (sb_cmp_ids - el)
              end
            end
            ndx_els.map do |depends_on, sb_cmp_ids|
              satisfied_by = (sb_cmp_ids.empty? ? nil : sb_cmp_ids.map { |cmp_id| ndx_component_print_form[cmp_id] }.join(', '))
              {:depends_on => depends_on, :satisfied_by => satisfied_by}
            end
          end
        end
        OutputTable.join(cmps_print_form, join_columns, &main_table_sort)
      end

      # Index component id -> display name; includes each satisfied_by component,
      # fetching any that are referenced but missing from the initial list.
      def ret_ndx_component_print_form(aug_cmps, cmps_with_print_form)
        ret = cmps_with_print_form.inject(Hash.new) { |h, cmp| h.merge(cmp[:id] => cmp[:display_name]) }

        # see if there are any components that are referenced but not in ret
        needed_cmp_ids = Array.new
        aug_cmps.each do |aug_cmp|
          next unless deps = aug_cmp[:dependencies]
          deps.map do |dep|
            dep.satisfied_by_component_ids.each do |cmp_id|
              needed_cmp_ids << cmp_id if ret[cmp_id].nil?
            end
          end
        end
        return ret if needed_cmp_ids.empty?

        filter_array = needed_cmp_ids.map { |cmp_id| [:eq, :id, cmp_id] }
        filter = (filter_array.size == 1 ? filter_array.first : [:or] + filter_array)
        additional_cmps = list_components(Opts.new(:filter => filter))
        additional_cmps.inject(ret) { |h, cmp| h.merge(cmp[:id] => cmp[:display_name]) }
      end
    end
  end
end; end
-
3
# Operational (running/stopped/pending) status helpers for assembly instances.
module DTK; class Assembly; class Instance
  module OpStatus
    module ClassMixin
      # Aggregate op status over node rows (each responds to [:admin_op_status]):
      #   'running' - at least one node is running
      #   'stopped' - at least one node stopped and none running
      #   'pending' - all nodes pending, or no nodes
      #   nil       - cannot tell (an unrecognized status was seen)
      def op_status(assembly_nodes)
        return 'pending' if assembly_nodes.empty?
        saw_stopped = false
        assembly_nodes.each do |node|
          case node[:admin_op_status]
          when 'running' then return 'running'
          when 'stopped' then saw_stopped = true
          when 'pending' then next # no op
          else return nil
          end
        end
        saw_stopped ? 'stopped' : 'pending'
      end

      # True when every node's admin_op_status is exactly 'pending'.
      def op_status_all_pending?(assembly_nodes)
        assembly_nodes.all? { |node| node[:admin_op_status] == 'pending' }
      end
    end

    module Mixin
      def any_stopped_nodes?()
        !!get_leaf_nodes(:cols => [:id, :admin_op_status]).find { |node| node[:admin_op_status] == 'stopped' }
      end

      # TODO: check that the methods below correctly don't use get_leaf_nodes
      def op_status()
        self.class.op_status(get_nodes(:admin_op_status))
      end

      def op_status_all_pending?()
        self.class.op_status_all_pending?(get_nodes(:admin_op_status))
      end
      ###########

      ##
      # Validates that this assembly's leaf nodes are ready to be started or stopped.
      #
      # * *Args* :
      #   - +node_pattern+   -> regexp source matched against node ids; falls back to
      #                         an exact display-name match when nothing matches by id
      #   - +status_pattern+ -> pattern to match node status (combined with 'pending')
      # * *Returns* :
      #   - filtered nodes
      #   - is-valid flag
      #   - error message when not valid (nil otherwise)
      #
      def nodes_valid_for_stop_or_start(node_pattern, status_pattern)
        nodes = get_leaf_nodes()

        # do not start/stop assembly wide nodes
        nodes.delete_if { |n| n[:type].eql?('assembly_wide') }

        # check for pattern
        unless node_pattern.nil? || node_pattern.empty?
          regex = Regexp.new(node_pattern)
          all_nodes = nodes
          nodes = nodes.select { |node| regex =~ node.id.to_s }
          if nodes.size == 0
            # fall back to exact display-name match
            nodes = all_nodes.select { |node| node_pattern.to_s.eql?(node.display_name.to_s) }
            return nodes, false, "No nodes have been matched via ID ~ '#{node_pattern}'." if nodes.size == 0
          end
        end

        # staged nodes cannot be started/stopped
        nodes.each do |node|
          if node.get_field?(:type) == Node::Type::Node.staged
            assembly_name = pretty_print_name()
            return nodes, false, "Nodes for assembly '#{assembly_name}' are 'staged' and as such cannot be started/stopped."
          end
        end

        # check for status -> this will translate to /running|pending/ and /stopped|pending/ checks
        filtered_nodes = nodes.select { |node| node.get_field?(:admin_op_status) =~ Regexp.new("#{status_pattern.to_s}|pending") }
        if filtered_nodes.size == 0
          assembly_name = pretty_print_name()
          return nodes, false, "There are no #{status_pattern} nodes for assembly '#{assembly_name}'."
        end

        return filtered_nodes, true, nil
      end

      # TODO: collapse above and below
      # Like nodes_valid_for_stop_or_start but operates on an explicit node list.
      # Returns [nodes, valid_flag, error_message_or_nil].
      def nodes_are_up?(nodes, status_pattern, opts = {})
        what = opts[:what] || "Command"

        # check if staged
        nodes.each do |node|
          if node.get_field?(:type) == Node::Type::Node.staged
            return nodes, false, "#{what} cannot be executed on nodes that are 'staged'."
          end
        end

        # check for status -> this will translate to /running|pending/ and /stopped|pending/ checks
        filtered_nodes = nodes.select { |node| node.get_field?(:admin_op_status) =~ Regexp.new("#{status_pattern.to_s}|pending") }
        if filtered_nodes.size == 0
          assembly_name = pretty_print_name()
          # bug fix: interpolate the already-computed name; the old
          # pretty_print_name(assembly_name) call passed an argument to a
          # zero-arg method and raised ArgumentError
          return nodes, false, "There are no #{status_pattern} nodes for assembly '#{assembly_name}'."
        end

        [filtered_nodes, true, nil]
      end
    end
  end
end; end; end
-
-
-
1
# A service link (dependency wiring) between two components of an assembly instance.
module DTK
  class Assembly::Instance
    class ServiceLink
      r8_nested_require('service_link','factory')

      def initialize(assembly_instance)
        @assembly_instance = assembly_instance
      end

      # Deletes one or more port links (accepts a single idh or an array), first
      # propagating attribute updates for the attribute links hanging off them.
      def self.delete(port_link_idhs)
        if port_link_idhs.kind_of?(Array)
          return if port_link_idhs.empty?
        else
          port_link_idhs = [port_link_idhs]
        end

        aug_attr_links = get_augmented_attribute_links(port_link_idhs)
        attr_mh = port_link_idhs.first.createMH(:attribute)
        Model.Transaction do
          Attribute.update_and_propagate_attributes_for_delete_links(attr_mh, aug_attr_links)
          port_link_idhs.map { |port_link_idh| Model.delete_instance(port_link_idh) }
        end
      end

      # Hash print form for either a PortLink (base + dependent component) or a
      # lone Port (base component only; required/description when a link def exists).
      def self.print_form_hash(object, opts = {})
        # set the following (some can have nil as legal value)
        service_type = base_ref = required = description = nil
        id = object[:id]
        if object.is_a?(PortLink)
          port_link = object
          input_port = print_form_hash__port(port_link[:input_port], port_link[:input_node], opts)
          output_port = print_form_hash__port(port_link[:output_port], port_link[:output_node], opts)
          service_type = port_link[:input_port].link_def_name()
          if service_type != port_link[:output_port].link_def_name()
            Log.error('input and output link defs are not equal')
          end
          # TODO: confusing that input/output on port link does not reflect what is logical input/output
          if port_link[:input_port][:direction] == 'input'
            base_ref = input_port
            dep_ref = output_port
          else
            base_ref = output_port
            dep_ref = input_port
          end
        elsif object.is_a?(Port)
          port = object
          base_ref = port.display_name_print_form()
          service_type = port.link_def_name()
          if link_def = port[:link_def]
            required = port[:required]
            description = port[:description]
          end
        else
          raise Error.new("Unexpected object type (#{object.class.to_s})")
        end

        ret = {
          :id => id,
          :type => service_type,
          :base_component => base_ref
        }
        ret.merge!(:dependent_component => dep_ref) if dep_ref
        ret.merge!(:required => required) if required
        ret.merge!(:description => description) if description
        ret
      end

      private

      # NOTE(review): `private` does not apply to `def self.` singleton methods, so
      # the two methods below are effectively public class methods — confirm intent
      # before tightening with private_class_method.
      def self.get_augmented_attribute_links(port_link_idhs)
        return Array.new if port_link_idhs.empty?
        sp_hash = {
          :cols => [:id, :group_id, :port_link_id, :input_id, :output_id, :dangling_link_info],
          :filter => [:oneof, :port_link_id, port_link_idhs.map { |idh| idh.get_id }]
        }
        attribute_link_mh = port_link_idhs.first.createMH(:attribute_link)
        Model.get_objs(attribute_link_mh, sp_hash)
      end

      def self.print_form_hash__port(port, node, opts = {})
        port.merge(:node => node).display_name_print_form(opts)
      end
    end
  end
end
-
1
# Factory that creates (or finds) the service link between an input and an
# output component of an assembly instance, creating ports as needed.
module DTK
  class Assembly::Instance
    class ServiceLink
      class Factory < self
        def initialize(assembly_instance, input_cmp_idh, output_cmp_idh, dependency_name)
          super(assembly_instance)
          @dependency_name = dependency_name
          @input_cmp_idh = input_cmp_idh
          @output_cmp_idh = output_cmp_idh
          @input_cmp = input_cmp_idh.create_object()
          @output_cmp = output_cmp_idh.create_object()
        end

        # Returns the id handle of the port link between the two components,
        # reusing an existing link when both ports already existed.
        def add?()
          port_link = nil
          input_port, output_port, new_port_created = add_or_ret_ports?()
          unless new_port_created
            # see if there is an existing port link
            # TODO: may also add filter on component_type
            filter = [:and, [:eq, :input_id, input_port.id()], [:eq, :output_id, output_port.id()]]
            pl_matches = @assembly_instance.get_port_links(:filter => filter)
            if pl_matches.size == 1
              port_link = pl_matches.first
            elsif pl_matches.size > 1
              raise Error.new("Unexpected result that matches more than one port link (#{pl_matches.inspect})")
            end
          end
          port_link ||= create_new_port_and_attr_links(input_port, output_port)
          port_link.id_handle()
        end

        private

        # Returns [input_port, output_port, new_port_created (boolean)].
        def add_or_ret_ports?()
          created = false
          ndx_ports = find_matching_ports?([@input_cmp_idh, @output_cmp_idh]).inject(Hash.new) { |h, p| h.merge(p[:component_id] => p) }
          unless input_port = ndx_ports[@input_cmp_idh.get_id()]
            input_port = create_port(:input)
            created = true
          end
          unless output_port = ndx_ports[@output_cmp_idh.get_id()]
            output_port = create_port(:output)
            created = true
          end
          [input_port, output_port, created]
        end

        # Existing ports on the given components whose link def matches @dependency_name.
        def find_matching_ports?(cmp_idhs)
          sp_hash = {
            :cols => [:id, :group_id, :display_name, :component_id],
            :filter => [:oneof, :component_id, cmp_idhs.map { |idh| idh.get_id() }]
          }
          port_mh = cmp_idhs.first.createMH(:port)
          Model.get_objs(port_mh, sp_hash).select { |p| p.link_def_name() == @dependency_name }
        end

        # Creates and returns a new port on the input or output component.
        def create_port(direction)
          @input_cmp.update_object!(:node_node_id, :component_type)
          @output_cmp.update_object!(:node_node_id, :component_type)
          stub = link_def_stub(direction)
          component = (direction == :input ? @input_cmp : @output_cmp)
          node = @assembly_instance.id_handle(:model_name => :node, :id => component[:node_node_id]).create_object()
          create_hash = Port.ret_port_create_hash(stub, node, component, :direction => direction.to_s)
          port_mh = node.child_model_handle(:port)
          Model.create_from_rows(port_mh, [create_hash]).first.create_object()
        end

        # Minimal link-def hash used to create a port; internal vs external is
        # decided by whether both components sit on the same node.
        def link_def_stub(direction)
          stub = {:link_type => @dependency_name}
          if @input_cmp[:node_node_id] == @output_cmp[:node_node_id]
            stub[:has_internal_link] = true
          else
            stub[:has_external_link] = true
          end
          if direction == :input
            sp_hash = {
              :cols => [:id],
              :filter => [:and, [:eq, :component_component_id, @input_cmp.id()],
                          [:eq, :link_type, stub[:link_type]]]
            }
            if match = Model.get_obj(@input_cmp.model_handle(:link_def), sp_hash)
              stub[:id] = match[:id]
            else
              Log.error("Unexpected that input component does not have a matching link def")
            end
          end
          stub
        end

        def create_new_port_and_attr_links(input_port, output_port)
          port_link_hash = {
            :input_id => input_port.id(),
            :output_id => output_port.id(),
          }
          override_attrs = {
            :assembly_id => @assembly_instance.id()
          }
          target = @assembly_instance.get_target()
          PortLink.create_port_and_attr_links__clone_if_needed(target.id_handle(), port_link_hash, :override_attrs => override_attrs)
        end
      end
    end
  end
end
-
-
-
1
module DTK
  class Assembly::Instance
    # Operations for creating, listing, and analyzing service links (port
    # links) on an assembly instance.
    module ServiceLinkMixin
      # Find or create the service link between the two components and return
      # the port link's id handle. Raises ErrorUsage when the dependency name
      # is missing or ambiguous (see find_dep_name_raise_error_if_ambiguous).
      def add_service_link?(input_cmp_idh,output_cmp_idh,opts={})
        dependency_name = find_dep_name_raise_error_if_ambiguous(input_cmp_idh,output_cmp_idh,opts)
        ServiceLink::Factory.new(self,input_cmp_idh,output_cmp_idh,dependency_name).add?()
      end

      # List existing service links plus unconnected ports, both rendered in
      # print form so clients see connected and dangling ends uniformly.
      def list_service_links(opts = {})
        get_opts = Aux.hash_subset(opts, [:filter])
        pp_opts = Aux.hash_subset(opts, [:context, :hide_assembly_wide_node])
        get_augmented_port_links(get_opts).map { |r| ServiceLink.print_form_hash(r, pp_opts) } +
          get_augmented_ports(:mark_unconnected => true).select { |r| r[:unconnected] }.map { |r| ServiceLink.print_form_hash(r, pp_opts) }
      end

      # Compute candidate connections between unconnected input ports and
      # available output ports, sorted by :service_ref.
      def list_connections__possible()
        ret = Array.new
        output_ports = Array.new
        unc_ports = Array.new
        get_augmented_ports(:mark_unconnected=>true).each do |r|
          if r[:direction] == "output"
            output_ports << r
          elsif r[:unconnected]
            unc_ports << r
          end
        end
        # Fix: the original guard tested `.nil?` on arrays that are always
        # initialized above, so it could never short-circuit. No connection is
        # possible without at least one port of each kind, so test emptiness.
        return ret if output_ports.empty? || unc_ports.empty?
        poss_conns = LinkDef.find_possible_connections(unc_ports,output_ports)
        poss_conns.map do |r|
          poss_conn = "#{r[:output_port][:id].to_s}:#{r[:output_port].display_name_print_form()}"
          ServiceLink.print_form_hash(r[:input_port]).merge(:possible_connection => poss_conn)
        end.sort{|a,b|a[:service_ref] <=> b[:service_ref]}
      end

      private
      # Determine which link-def (dependency) connects the two component
      # types. opts[:dependency_name] disambiguates when several match.
      # Raises ErrorUsage when none match, when the explicit name does not
      # match, or when the choice is ambiguous.
      def find_dep_name_raise_error_if_ambiguous(input_cmp_idh,output_cmp_idh,opts={})
        input_cmp = input_cmp_idh.create_object()
        output_cmp = output_cmp_idh.create_object()
        matching_link_defs = LinkDef.get_link_defs_matching_antecendent(input_cmp,output_cmp)
        matching_link_types = matching_link_defs.map{|ld|ld.get_field?(:link_type)}.uniq

        input_cmp_name = input_cmp.component_type_print_form()
        output_cmp_name = output_cmp.component_type_print_form()

        if dep_name = opts[:dependency_name]
          if matching_link_types.include?(dep_name)
            dep_name
          else
            raise ErrorUsage.new("Specified dependency name (#{dep_name}) does not match any of the dependencies defined between component type (#{input_cmp_name}) and component type (#{output_cmp_name}): #{matching_link_types.join(',')}")
          end
        elsif matching_link_types.size == 1
          matching_link_types.first
        elsif matching_link_types.empty?
          raise ErrorUsage.new("There are no dependencies defined between component type (#{input_cmp_name}) and component type (#{output_cmp_name})")
        else #matching_link_types.size > 1
          raise ErrorUsage.new("Ambiguous which dependency between component type (#{input_cmp_name}) and component type (#{output_cmp_name}) selected; select one of #{matching_link_types.join(',')})")
        end
      end
    end
  end
end
-
-
1
module DTK
  # A named bundle of settings (attribute values and node bindings) that can
  # be applied to an assembly within a target.
  class ServiceSetting < Model
    r8_nested_require('service_setting','array')
    r8_nested_require('service_setting','attribute_settings')
    r8_nested_require('service_setting','node_bindings')

    # Columns fetched for most service-setting queries.
    def self.common_columns()
      [:id, :display_name, :group_id, :node_bindings, :attribute_settings]
    end

    # Substitute the given parameters into any templated attribute settings.
    def bind_parameters!(hash_params)
      reify!()
      apply_to_field?(:attribute_settings) { |settings| settings.bind_parameters!(hash_params) }
    end

    # Apply attribute settings to the assembly and node bindings to the
    # target/assembly pair.
    def apply_setting(target,assembly)
      reify!()
      apply_to_field?(:attribute_settings) { |settings| settings.apply_settings(assembly) }
      apply_to_field?(:node_bindings) { |node_bindings| node_bindings.set_node_bindings(target,assembly) }
    end

    # Convert raw hash fields into their typed wrapper objects (idempotent).
    def reify!()
      reify_field!(:attribute_settings,AttributeSettings)
      reify_field!(:node_bindings,NodeBindings)
    end

    private

    # Yield the field's content when present; nil/absent fields are skipped.
    def apply_to_field?(field,&block)
      content = self[field]
      block.call(content) if content
    end

    # Wrap self[field] in klass unless it already is one.
    def reify_field!(field,klass)
      content = self[field]
      return unless content
      self[field] = klass.new(content) unless content.kind_of?(klass)
    end
  end
end
-
1
module DTK
  class ServiceSetting
    # Collection of ServiceSetting objects; applying the collection applies
    # each contained setting against the given target/assembly pair.
    class Array < ::Array
      # Apply every setting in the collection; returns the collection itself
      # (standard Array#each semantics).
      def apply_settings(target,assembly)
        each do |setting|
          setting.apply_setting(target,assembly)
        end
      end
    end
  end
end
-
-
1
module DTK
  class ServiceSetting
    # Ordered collection of attribute-setting Elements, parsed from the
    # nested hash form (see HashForm for the key layout).
    class AttributeSettings < Array
      r8_nested_require('attribute_settings','hash_form')
      r8_nested_require('attribute_settings','element')

      # Flatten the nested settings hash into Elements.
      def initialize(settings_hash={})
        super()
        unless settings_hash.empty?
          self.class.each_element(settings_hash){|el| self << el}
        end
      end

      # Substitute hash_params into each element's templated value (in place).
      def bind_parameters!(hash_params)
        # TODO: would be more efficient probably to apply these all at once rather than per element
        each{|el|el.bind_parameters!(hash_params)}
      end

      # Apply only the settings that differ from the assembly's current
      # attribute values (diffed by attribute path / unique_index).
      def self.apply_using_settings_hash(assembly,settings_hash)
        attr_settings = new(settings_hash)
        # get all existing attributes to find just the diffs
        existing_attr_settings = all_assemblies_attribute_settings(assembly)
        pruned_attr_settings = attr_settings.ret_just_diffs(existing_attr_settings)
        unless pruned_attr_settings.empty?
          pruned_attr_settings.apply_settings(assembly)
        end
      end

      # Delegate flattening of the nested hash to HashForm.
      def self.each_element(settings_hash,attr_prefix=nil,&block)
        HashForm.each_element(settings_hash,attr_prefix,&block)
      end

      # Push all elements onto the assembly as attribute-value pairs,
      # creating node-level and assembly-level attributes as needed.
      def apply_settings(assembly)
        av_pairs = map{|el|el.av_pair_form()}
        opts_set = {:partial_value => false,:create=>[:node_level,:assembly_level]}
        assembly.set_attributes(av_pairs,opts_set)
      end

      # Return a new collection containing only elements that are missing
      # from, or have a different value than, existing_attr_settings
      # (matched by unique_index, i.e. the attribute path).
      def ret_just_diffs(existing_attr_settings)
        ret = self.class.new()
        ndx_attr_settings = existing_attr_settings.inject(Hash.new) do |h,el|
          h.merge(el.unique_index() => el)
        end
        each do |el|
          match = ndx_attr_settings[el.unique_index()]
          unless match and el.equal_value?(match)
            ret << el
          end
        end
        ret
      end

      private
      # NOTE(review): `private` has no effect on `def self.` class methods —
      # this remains externally callable (private_class_method would hide it).
      # Renders the assembly's full attribute structure into a collection.
      def self.all_assemblies_attribute_settings(assembly,filter_proc=nil)
        new(HashForm.render(assembly.get_attributes_all_levels_struct(filter_proc)))
      end
    end
  end
end
-
1
require 'erubis'
-
2
module DTK; class ServiceSetting
  class AttributeSettings
    # A single attribute setting: an attribute path plus its raw (possibly
    # erubis-templated) value.
    class Element
      attr_reader :raw_value
      def initialize(attribute_path,raw_value)
        @attribute_path = attribute_path
        @raw_value = raw_value
      end

      # Render erubis bindings in the raw value in place; non-string values
      # are left untouched. Always returns self. On a template error the
      # failure is logged and an ErrorUsage is raised for the caller.
      def bind_parameters!(hash_params)
        # TODO: need alot more checking also making sure no unbound attribute
        return self unless @raw_value.kind_of?(String)
        template = ::Erubis::Eruby.new(@raw_value)
        begin
          @raw_value = template.result(hash_params)
        rescue Exception => e
          Log.error("The following erubis error resulted from service setting bindings: #{e.inspect}")
          params_print = hash_params.map { |k, v| "#{k}=>#{v}" }.join(',')
          raise ErrorUsage.new("Error in applying setting parameters (#{params_print}) to attribute (#{@attribute_path}) with value (#{@raw_value}")
        end
        self
      end

      # Pattern/value pair consumed by Assembly#set_attributes.
      def av_pair_form()
        {:pattern => @attribute_path, :value => value()}
      end

      def value()
        RawValue.value(@raw_value)
      end

      def equal_value?(el)
        RawValue.equal?(@raw_value,el.raw_value)
      end

      # Elements are diffed/matched by their attribute path.
      def unique_index()
        @attribute_path
      end

      # Helpers for normalizing and deep-comparing raw setting values.
      module RawValue
        # Hashes and arrays pass through unchanged; scalars are stringified.
        def self.value(val)
          if val.kind_of?(::Hash) or val.kind_of?(::Array)
            val
          else
            val.to_s
          end
        end

        # Deep structural equality; values of different classes never match.
        def self.equal?(val1,val2)
          return false unless val1.class == val2.class
          case val1
          when ::Hash
            return false unless Aux.equal_sets(val1.keys,val2.keys)
            val1.all? { |key, member| equal?(member, val2[key]) }
          when ::Array
            return false unless val1.size == val2.size
            val1.each_index.all? { |i| equal?(val1[i], val2[i]) }
          else
            val1 == val2
          end
        end
      end
    end
  end
end; end
-
-
2
module DTK; class ServiceSetting
  class AttributeSettings
    # Renders/parses attribute settings in their nested-hash form: keys
    # ending in '/' denote nesting contexts (nodes/components) and all other
    # keys are attribute names mapped to values.
    class HashForm < self
      # Render an assembly's full attribute structure as a nested hash.
      def self.render(all_attrs_struct)
        render_in_hash_form(all_attrs_struct)
      end

      private
      ContextDelim = '/'
      # Walk the nested settings hash depth-first, yielding an Element for
      # each leaf (attribute => value) pair; keys ending in ContextDelim
      # recurse with an extended attribute-path prefix.
      # NOTE(review): `private` above does not apply to `def self.` methods.
      def self.each_element(settings_hash,attr_prefix=nil,&block)
        settings_hash.each_pair do |key,body|
          if key =~ Regexp.new("(^.+)#{ContextDelim}$")
            attr_part = $1
            nested_attr_prefix = compose_attr(attr_prefix,attr_part)
            if body.kind_of?(Hash)
              each_element(body,nested_attr_prefix,&block)
            else
              # context key with a non-hash body: logged and skipped
              Log.error_pp(["Unexpected form in AttributeSettings::HashForm.each_element:",key,body, "ignoring; should be caught in better parsing of settings"])
            end
          else
            attr = compose_attr(attr_prefix,key)
            value = body
            block.call(Element.new(attr,value))
          end
        end
      end

      AttrPartDelim = '/'
      # Join a path prefix and a path part with the delimiter; a nil prefix
      # yields just the (stringified) part.
      def self.compose_attr(attr_prefix,attr_part)
        attr_prefix ? "#{attr_prefix}#{AttrPartDelim}#{attr_part}" : attr_part.to_s
      end

      # Build the nested hash: assembly attributes at the top, then per-node
      # sections (under 'nodes'), each with its own 'attributes' and
      # 'components'; assembly-wide components are hoisted under a top-level
      # 'components' key.
      def self.render_in_hash_form(all_attrs_struct)
        # merge the node and component attributes in a nested structure
        ndx_attrs = Hash.new
        all_attrs_struct.node_attrs.each do |node_attr|
          # do not display node_attributes for assembly_wide node
          next if node_attr[:node][:type].eql?('assembly_wide')

          node_info = ndx_attrs[node_attr[:node][:display_name]]||= {:attrs => Hash.new,:cmps => Hash.new}
          node_info[:attrs].merge!(node_attr[:display_name] => attribute_value(node_attr))
        end
        all_attrs_struct.component_attrs.each do |cmp_attr|
          node_info = ndx_attrs[cmp_attr[:node][:display_name]]||= {:attrs => Hash.new,:cmps => Hash.new}
          cmp_print_name = cmp_attr[:nested_component].display_name_print_form()
          cmp_info = node_info[:cmps][cmp_print_name] ||= Hash.new
          cmp_info.merge!(cmp_attr[:display_name] => attribute_value(cmp_attr))
        end

        # put assembly attributes in ret
        ret = all_attrs_struct.assembly_attrs.sort{|a,b|a[:display_name] <=> b[:display_name]}.inject(SimpleOrderedHash.new) do |h,attr|
          h.merge(attr[:display_name] => attribute_value(attr))
        end

        # put node and component attributes in ret
        ndx_attrs.keys.sort().each do |node_name|
          # the 'assembly_wide' pseudo-node is rendered as top-level components
          is_assembly_wide = all_attrs_struct.node_attrs.find{|node| node[:node][:type].eql?('assembly_wide')} if node_name.eql?('assembly_wide')

          if is_assembly_wide
            ret_node_pntr = ret['components'] = SimpleOrderedHash.new
          else
            ret['nodes'] ||= {}
            ret_node_pntr = ret['nodes']["#{node_name}#{ContextDelim}"] = SimpleOrderedHash.new
          end

          node_info = ndx_attrs[node_name]
          node_info[:attrs].keys.sort.each do |attr_name|
            ret_node_pntr['attributes'] ||= {}
            ret_node_pntr['attributes'].merge!(attr_name => node_info[:attrs][attr_name])
          end

          node_info[:cmps].keys.sort.each do |cmp_name|
            if is_assembly_wide
              ret_cmp_pntr = ret_node_pntr["#{cmp_name}#{ContextDelim}"] = SimpleOrderedHash.new
            else
              ret_node_pntr['components'] ||= {}
              ret_cmp_pntr = ret_node_pntr['components']["#{cmp_name}#{ContextDelim}"] = SimpleOrderedHash.new
            end
            cmp_info = node_info[:cmps][cmp_name]
            ret_cmp_pntr['attributes'] ||= {}
            cmp_info.keys.sort.each do |attr_name|
              ret_cmp_pntr['attributes'].merge!(attr_name => cmp_info[attr_name])
            end
          end
        end
        return ret unless ret['components']

        # put assembly wide components on top
        cmps = ret.delete('components')
        # NOTE(review): the assignment to ndx_ret is redundant — the merged
        # hash is the method's return value either way.
        ndx_ret = {'components' => cmps}.merge(ret)
      end

      # Convert a raw attribute row's value to a plain Ruby object.
      def self.attribute_value(attr)
        attr.convert_value_to_ruby_object()
      end
    end
  end
end; end
-
1
module DTK
  class ServiceSetting
    # Mapping of assembly nodes to target nodes, held as Element objects.
    class NodeBindings < Array
      def initialize(content)
        super()
        self.class.each_element(content) { |element| push(element) }
      end

      # Collapse the elements back into a single hash and hand it to the
      # node-bindings DSL processor.
      def set_node_bindings(target,assembly)
        hash_content = Hash.new
        each { |element| hash_content.merge!(element.hash_form) }
        ::DTK::NodeBindings::DSL.set_node_bindings(target,assembly,hash_content)
      end

      private
      # Yield one Element per (assembly_node => node_target) pair.
      def self.each_element(content,&block)
        content.each_pair do |assembly_node,node_target|
          block.call(Element.new(assembly_node,node_target))
        end
      end

      # One (assembly_node => node_target) binding.
      class Element
        attr_reader :assembly_node
        def initialize(assembly_node,node_target)
          @assembly_node = assembly_node
          @node_target = node_target
        end
        def hash_form()
          {@assembly_node => @node_target}
        end
      end
    end
  end
end
-
3
module DTK; class Assembly; class Instance
  # Base class for staged assembly-instance update operations; the concrete
  # operations are the nested Node/Component Add/Delete subclasses.
  class Update
    def initialize(assembly_idh)
      @assembly_idh = assembly_idh
    end

    # Reify the stored id handle into the assembly-instance object.
    def assembly_instance()
      @assembly_idh.create_object()
    end

    # Node-level operations.
    class Node < self
      class Add < self; end
      class Delete < self; end
    end

    # Component-level operations.
    class Component < self
      class Add < self; end
      class Delete < self; end
    end

  end
end; end; end
-
1
module DTK
-
1
class Assembly::Instance
-
1
# Maximum number of nodes permitted in the builtin target, from R8 config.
TARGET_BUILTIN_NODE_LIMIT = R8::Config[:dtk][:target][:builtin][:node_limit].to_i

# Computes the assembly instance's current violations (missing required
# attributes, failed component constraints, DSL parse errors, unmet
# dependencies, module-ref problems, node-count limits).
module ViolationMixin
  # Collect every kind of violation for this assembly instance and return
  # them as one flat array of Violation objects.
  def find_violations()
    nodes_and_cmps = get_info__flat_list(:detail_level => "components").select{|r|r[:nested_component]}
    cmps = nodes_and_cmps.map{|r|r[:nested_component]}

    unset_attr_viols = find_violations__unset_attrs()
    cmp_constraint_viols = find_violations__cmp_constraints(nodes_and_cmps,cmps.map{|cmp|cmp.id_handle()})
    cmp_parsing_errors = find_violations__cmp_parsing_error(cmps)
    unconn_req_service_refs = find_violations__unconn_req_service_refs()
    mod_refs_viols = find_violations__module_refs(cmps)
    num_of_target_nodes = find_violations__num_of_target_nodes()

    unset_attr_viols + cmp_constraint_viols + unconn_req_service_refs + mod_refs_viols + cmp_parsing_errors + num_of_target_nodes
  end
  private
  # Required-but-unset attributes at assembly, component, and node level
  # (node-level violations on the assembly_wide pseudo-node are dropped).
  def find_violations__unset_attrs()
    filter_proc = lambda{|a|a.required_unset_attribute?()}
    assembly_attr_viols = get_assembly_level_attributes(filter_proc).map{|a|Violation::ReqUnsetAttr.new(a,:assembly)}
    # component/node filters receive augmented rows, hence the different shape
    filter_proc = lambda{|r|r[:attribute].required_unset_attribute?()}
    component_attr_viols = get_augmented_nested_component_attributes(filter_proc).map{|a|Violation::ReqUnsetAttr.new(a,:component)}

    node_attributes = get_augmented_node_attributes(filter_proc)
    # remove attribute violations if assembly wide node
    node_attributes.delete_if do |n_attr|
      if node = n_attr[:node]
        node[:type].eql?('assembly_wide')
      end
    end
    node_attr_viols = node_attributes.map{|a|Violation::ReqUnsetAttr.new(a,:node)}

    assembly_attr_viols + component_attr_viols + node_attr_viols
  end

  # Evaluate each component's :after_cmp_added constraints against its host
  # node, collecting a violation for every constraint that fails.
  def find_violations__cmp_constraints(nodes_and_cmps,cmp_idhs)
    ret = Array.new
    return ret if cmp_idhs.empty?
    ndx_constraints = Component.get_ndx_constraints(cmp_idhs,:when_evaluated => :after_cmp_added)
    # TODO: this is expensive in that it makes query for each constraint
    nodes_and_cmps.each do |r|
      if constraint_info = ndx_constraints[r[:nested_component][:id]]
        constraint_scope = {"target_node_id_handle" => r[:node].id_handle()}
        constraint_info[:constraints].each do |constraint|
          unless constraint.evaluate_given_target(constraint_scope)
            ret << Violation::ComponentConstraint.new(constraint,r[:node])
          end
        end
      end
    end
    ret
  end

  # Required dependencies (link defs) whose port is still unconnected.
  def find_violations__unconn_req_service_refs()
    ret = Array.new
    get_augmented_ports(:mark_unconnected=>true).each do |aug_port|
      if aug_port[:unconnected] and aug_port[:link_def][:required]
        ret << Violation::UnconnReqServiceRef.new(aug_port)
      end
    end
    ret
  end

  # DSL parse errors on component modules, the service module, and (when the
  # branch belongs to a service instance) the assembly's own branch.
  def find_violations__cmp_parsing_error(cmps)
    ret = Array.new
    return ret if cmps.empty?

    cmps.each do |cmp|
      cmp_module_branch = get_parsed_info(cmp[:module_branch_id], "ComponentBranch")
      if cmp_module_branch && cmp_module_branch[:component_module]
        ret << Violation::ComponentParsingError.new(cmp_module_branch[:component_module][:display_name], "Component") unless cmp_module_branch[:dsl_parsed]
      end
    end

    if service_module_branch = get_parsed_info(self[:module_branch_id], "ServiceBranch")
      ret << Violation::ComponentParsingError.new(service_module_branch[:service_module][:display_name], "Service") unless service_module_branch[:dsl_parsed]
    end

    # if module_branch belongs to service instance assembly_module_version? will not be nil
    assembly_branch = AssemblyModule::Service.get_assembly_branch(self)
    if assembly_branch.assembly_module_version?
      # add violation if module_branch[:dsl_parsed] == false
      ret << Violation::ComponentParsingError.new(self[:display_name], "Service instance") unless assembly_branch[:dsl_parsed]
    end

    ret
  end

  # Module-reference problems: modules included in dsl but not installed,
  # modules mapped to multiple namespaces, or a self-dependency cycle.
  def find_violations__module_refs(cmps)
    # NOTE(review): `ret = missing = Array.new` aliases both names to the
    # same array; `missing` is rebound below, so the alias is harmless but
    # misleading.
    ret = missing = Array.new
    multiple_ns = Hash.new
    return ret if cmps.empty?

    begin
      module_refs_tree = ModuleRefs::Tree.create(self,:components => cmps)
    rescue ErrorUsage => e
      # tree creation signals a module depending on itself via ErrorUsage
      ret << Violation::HasItselfAsDependency.new(e.message)
      return ret
    end

    missing, multiple_ns = module_refs_tree.violations?

    unless missing.empty?
      missing.each do |k,v|
        ret << Violation::MissingIncludedModule.new(k,v)
      end
    end

    unless multiple_ns.empty?
      multiple_ns.each do |k,v|
        ret << Violation::MultipleNamespacesIncluded.new(k,v)
      end
    end

    ret
  end

  # For builtin targets only: flag when staging the not-yet-running nodes
  # would push the target past TARGET_BUILTIN_NODE_LIMIT.
  def find_violations__num_of_target_nodes()
    ret = Array.new

    target_idh = self.get_target().id_handle()
    target = target_idh.create_object(:model_name => :target_instance)

    # check if allowed number of nodes is exceeded (only for builtin target)
    if target.is_builtin_target?
      new_nodes, current_nodes = [], []

      self.get_leaf_nodes().each do |l_node|
        # we need only nodes that are currently not running
        new_nodes << l_node unless l_node[:admin_op_status] == 'running'
      end

      # running target nodes
      current_nodes = target.get_target_running_nodes()
      new_nodes_size = new_nodes.size
      current_nodes_size = current_nodes.size
      ret << Violation::NodesLimitExceeded.new(new_nodes_size, current_nodes_size) if (current_nodes_size + new_nodes_size) > TARGET_BUILTIN_NODE_LIMIT
    end

    ret
  end

  # Fetch parse-status info for a module branch. For "ComponentBranch" /
  # "ServiceBranch" the branch row itself is returned (with module info
  # columns); otherwise the component or service module row is looked up.
  # Returns nil when the branch does not exist.
  # NOTE(review): because of the early return above, the "Component"/else
  # branches below are only reached for other type values.
  def get_parsed_info(module_branch_id, type)
    ret = nil
    cols = [:id, :type, :component_id, :service_id, :dsl_parsed]

    if type.to_s.eql?("ComponentBranch")
      cols << :component_module_info
    elsif type.to_s.eql?("ServiceBranch")
      cols << :service_module
    end

    sp_hash = {
      :cols => cols,
      :filter => [:eq, :id, module_branch_id]
    }
    unless branch = Model.get_obj(model_handle(:module_branch),sp_hash)
      return ret
    end

    return branch if type.to_s.eql?("ComponentBranch") || type.to_s.eql?("ServiceBranch")

    if (type == "Component")
      sp_cmp_hash = {
        :cols => [:id, :display_name, :dsl_parsed],
        :filter => [:eq, :id, branch[:component_id]]
      }
      Model.get_obj(model_handle(:component_module),sp_cmp_hash)
    else
      sp_cmp_hash = {
        :cols => [:id, :display_name, :dsl_parsed],
        :filter => [:eq, :id, branch[:service_id]]
      }
      Model.get_obj(model_handle(:service_module),sp_cmp_hash)
    end
  end

end
-
-
1
# Base class for assembly violations. Each subclass reports a
# machine-readable type() symbol and a human-readable description().
class Violation
  # A required attribute with no value set (assembly, component, or node level).
  class ReqUnsetAttr < self
    def initialize(attr,type)
      @attr_display_name = attr.print_form(Opts.new(:level=>type))[:display_name]
    end
    def type()
      :required_unset_attribute
    end
    def description()
      "Attribute (#{@attr_display_name}) is required, but unset"
    end
  end

  # A component constraint that evaluated false on its host node.
  class ComponentConstraint < self
    def initialize(constraint,node)
      @constraint = constraint
      @node = node
    end
    def type()
      :component_constraint
    end
    def description()
      "On assembly node (#{@node[:display_name]}): #{@constraint[:description]}"
    end
  end

  # A required service reference (port) that is not connected.
  class UnconnReqServiceRef < self
    def initialize(aug_port)
      @augmented_port = aug_port
    end
    def type()
      :unmet_dependency
    end
    def description()
      "Component (#{@augmented_port.display_name_print_form()}) has an unmet dependency"
    end
  end

  # DSL syntax errors in a component/service module or service instance.
  class ComponentParsingError < self
    def initialize(component, type)
      @component = component
      @type = type
    end
    def type()
      :parsing_error
    end
    def description()
      "#{@type} '#{@component}' has syntax errors in DSL files."
    end
  end

  # A module included in the dsl that is not installed.
  class MissingIncludedModule < self
    def initialize(included_module, namespace, version = nil)
      @included_module = included_module
      @namespace = namespace
      @version = version
    end
    def type()
      :missing_included_module
    end
    def description()
      full_name = "#{@namespace}:#{@included_module}"
      "Module '#{full_name}#{@version.nil? ? '' : '-'+@version}' is included in dsl, but not installed. Use 'print-includes' to see more details."
    end
  end

  # A module included in the dsl that maps to more than one namespace.
  class MultipleNamespacesIncluded < self
    def initialize(included_module, namespaces)
      @included_module = included_module
      @namespaces = namespaces
    end
    def type()
      :mapped_to_multiple_namespaces
    end
    def description()
      "Module '#{@included_module}' included in dsl is mapped to multiple namespaces: #{@namespaces.join(', ')}. Use 'print-includes' to see more details."
    end
  end

  # A dependency cycle where a module includes itself.
  class HasItselfAsDependency < self
    def initialize(message)
      @message = message
    end
    def type()
      :has_itself_as_dependency
    end
    def description()
      @message
    end
  end

  # Staging would exceed the builtin target's node limit.
  class NodesLimitExceeded < self
    def initialize(new_nodes, running)
      @new = new_nodes
      @running = running
    end
    def type()
      :nodes_limit_exceeded
    end
    def description()
      # Fix: corrected user-facing message typo 'beacuse' -> 'because'.
      "There are #{@running} nodes currently running in builtin target. Unable to create #{@new} new nodes because it will exceed number of nodes allowed in builtin target (#{TARGET_BUILTIN_NODE_LIMIT})"
    end
  end
end
-
end
-
end
-
1
module DTK
-
1
class Assembly
-
1
module ListMixin
-
1
# Build the detailed info view of an assembly (template or instance),
# optionally narrowed to one node, one component, and/or one attribute.
# Returns a single pruned hash (the first/only assembly row).
def info(node_id=nil, component_id=nil, attribute_id=nil, opts={})
  is_template = kind_of?(Template)
  opts.merge!(:is_template => true) if is_template

  nested_virtual_attr = (is_template ? :template_nodes_and_cmps_summary : :instance_nodes_and_cmps_summary)
  sp_hash = {
    :cols => [:id, :display_name,:component_type,nested_virtual_attr]
  }
  assembly_rows = get_objs(sp_hash)
  Instance.get_last_task_run_status(assembly_rows,model_handle())

  # no filters at all: attach the full (sorted) node list to the first row
  if (node_id.to_s.empty? && component_id.to_s.empty? && attribute_id.to_s.empty?)
    nodes_info = (is_template ? get_nodes() : get_nodes__expand_node_groups({:remove_node_groups => true}))
    nodes_info.reject!{|n| n[:type].eql?('assembly_wide')} if opts[:remove_assembly_wide_node]
    assembly_rows.first[:nodes] = nodes_info.sort{|a,b| a[:display_name] <=> b[:display_name] }
  end

  # filter nodes by node_id if node_id is provided in request
  unless (node_id.nil? || node_id.empty?)
    sp_hash = {
      :cols => [:id,:display_name,:admin_op_status,:os_type,:external_ref,:type,:ordered_component_ids],
      :filter => [:and, [:eq, :id, node_id]]
    }
    node = Model.get_obj(model_handle(:node),sp_hash)
    assembly_rows.first[:node] = node

    # NOTE(review): the block parameter shadows the `node` local above;
    # each element here is an assembly row, not a node
    assembly_rows = assembly_rows.select { |node| node[:node][:id] == node_id.to_i }
    opts.merge!(:component_info => true)
  end

  # filter nodes by component_id if component_id is provided in request
  unless (component_id.nil? || component_id.empty?)
    assembly_rows = assembly_rows.select { |node| node[:nested_component][:id] == component_id.to_i }
    opts.merge!(:component_info => true, :attribute_info => true)
  end

  # load attributes for assembly
  attr_rows = self.class.get_default_component_attributes(model_handle(), assembly_rows)

  # filter attributes by attribute_name if attribute_name is provided in request
  if attribute_id
    attr_rows.reject! { |attr| attr[:id] != attribute_id.to_i }
  end

  # reconfigure response fields that will be returned to the client
  opts_list = {:print_form=>true, :sanitize=>true}.merge(opts)

  if kind_of?(Instance)
    # add namespace info to templates of non-workspace instances
    assembly_templates = assembly_rows.map{|a|a[:assembly_template] unless Workspace.is_workspace?(a)}.compact
    unless assembly_templates.empty?
      Template.augment_with_namespaces!(assembly_templates)
      opts_list[:include_namespaces] ||= true
    end
  end

  ret = self.class.list_aux(assembly_rows,attr_rows, opts_list).first
  if kind_of?(Template)
    # templates have no runtime status
    [:op_status,:last_task_run_status].each{|k|ret.delete(k)}
  end

  # keep only the client-facing keys on each node
  # NOTE(review): assumes list_aux always returns a :nodes array — confirm
  ret[:nodes].each do |node|
    node.reject!{|k|![:display_name,:node_properties,:components].include?(k)}
  end

  # TODO: temp until get removes this attribute
  ret.delete(:execution_status)
  ret
end
-
-
1
# Delegate to the class-level pretty_print_name helper.
#
# Fix: the original call site passed `opts={}` — an assignment that rebinds
# opts to a fresh empty hash and forwards that — silently discarding any
# options the caller supplied. Forward the caller's opts instead.
def pretty_print_name(opts={})
  self.class.pretty_print_name(self,opts)
end
-
-
end
-
-
1
module ListClassMixin
-
1
# Shared renderer for assembly list/info responses. Groups raw rows by
# assembly id, attaches formatted node/component/attribute data, then prunes
# each assembly to the client-facing keys and sorts by display name.
# attr_rows are only consulted when opts[:attribute_info] is set.
def list_aux(assembly_rows,attr_rows=[],opts={})
  ndx_attrs = Hash.new

  # index non-empty attribute values by owning component id
  if opts[:attribute_info]
    attr_rows.each do |attr|
      if (attr[:attribute_value] && !attr[:attribute_value].empty?)
        (ndx_attrs[attr[:component_component_id]] ||= Array.new) << attr
      end
    end
  end

  ndx_ret = Hash.new
  pp_opts = Aux.hash_subset(opts,[:no_module_prefix])
  assembly_template_opts = {:version_suffix => true}
  if opts[:include_namespaces]
    assembly_template_opts.merge!(:include_namespace => true, :service_module_context_path => true)
  end
  assembly_rows.each do |r|
    last_task_run_status = r[:last_task_run_status]
    # one pruned entry per assembly id; later rows for the same assembly
    # only contribute node/component data
    pntr = ndx_ret[r[:id]] ||= r.prune_with_values(
      :display_name => r.pretty_print_name(pp_opts),
      :last_task_run_status => last_task_run_status,
      # TODO: will deprecate :execution_status after removing it from smoketests
      :execution_status => last_task_run_status||'staged',
      :ndx_nodes => Hash.new
    )

    if module_branch_id = r[:module_branch_id]
      pntr[:module_branch_id] ||= module_branch_id
    end

    if target = r[:target]
      # normalize a security_group_set into a comma-joined :security_group
      sec_group_set = target[:iaas_properties][:security_group_set]
      target[:iaas_properties][:security_group] ||= sec_group_set.join(',') if sec_group_set
      pntr[:target] ||= target[:display_name]
      # stashed in opts so format_node! can pull keypair/security groups
      opts.merge!(:target => target)
    end

    if version = pretty_print_version(r)
      pntr.merge!(:version => version)
    end

    if template = r[:assembly_template]
      # just triggers for assembly instances; indicates the assembly template that spawned it
      pntr.merge!(:assembly_template => Template.pretty_print_name(template,assembly_template_opts))
    end

    if created_at = r[:created_at]
      pntr.merge!(:created_at => created_at)
    end

    if node = format_node!(pntr[:ndx_nodes],r[:node],opts)
      format_components_and_attributes(node,r,ndx_attrs,opts)
    end

    # if node group take only group members
    if r[:node] && r[:node].is_node_group?() && !opts[:is_template]
      r[:nodes] = r.get_nodes__expand_node_groups({:remove_node_groups => true, :add_group_member_components => true}) unless opts[:only_node_group_info]
      r[:nodes].sort!{|a,b| a[:display_name] <=> b[:display_name] }
      opts.merge!(:add_group_member_components => true)
    end

    if r[:nodes]
      r[:nodes].each do |n|
        format_node!(pntr[:ndx_nodes],n,opts)
        process_node_group_memeber_components(pntr[:ndx_nodes],n,opts) if opts[:add_group_member_components]
      end
    end
  end

  # flatten the node index into :nodes and keep only client-facing keys
  unsorted = ndx_ret.values.map do |r|
    nodes = r[:ndx_nodes].values
    nodes.reject!{|n| n[:type].eql?('assembly_wide')} if opts[:remove_assembly_wide_node]
    op_status = (op_status(nodes) if respond_to?(:op_status))
    r.merge(:op_status => op_status,:nodes => nodes).slice(:id,:display_name,:op_status,:last_task_run_status,:execution_status,:module_branch_id,:version,:assembly_template,:target,:nodes,:created_at,:keypair,:security_groups)
  end

  sanitize!(unsorted) if opts[:sanitize]

  unsorted.sort{|a,b|a[:display_name] <=> b[:display_name]}
end
-
-
1
private
-
1
# Strip sensitive/noisy node fields from each assembly hash in the output;
# assemblies without a :nodes entry are left untouched. Returns output.
def sanitize!(output)
  output.each do |assembly|
    nodes = assembly[:nodes] || []
    nodes.each { |node_hash| Node.sanitize!(node_hash) }
  end
end
-
-
1
# The component hash for a list row: prefer the explicit component template,
# fall back to the nested component, otherwise an empty hash.
def list_aux__component_template(r)
  template = r[:component_template]
  template || r[:nested_component] || {}
end
-
-
# format node adds :node_properties and empty array to ndx_nodes
-
1
# format node adds :node_properties and empty array to ndx_nodes
# Registers raw_node under its display name in ndx_nodes (once), attaching a
# :node_properties hash built from the node row plus target iaas properties,
# and an empty :components array. Returns the indexed node entry, or nil when
# raw_node is nil. Node groups are skipped unless opts[:only_node_group_info]
# or opts[:is_template] is set.
def format_node!(ndx_nodes,raw_node,opts=Hash.new)
  if raw_node
    target = opts[:target]
    node_name = raw_node[:display_name]
    external_ref = nil

    format_current_node = (!raw_node.is_node_group?() || opts[:only_node_group_info])
    if ndx_nodes[node_name].nil? && (format_current_node || opts[:is_template]) #!raw_node.is_node_group?()
      if node_ext_ref = raw_node[:external_ref]
        external_ref = node_external_ref_print_form(node_ext_ref,opts)
        # remove :git_authorized
        external_ref = external_ref.inject(Hash.new) do |h,(k,v)|
          k == :git_authorized ? h : h.merge(k => v)
        end
      end

      node_properties = {
        :node_id => raw_node[:id],
        :os_type => raw_node[:os_type],
        :admin_op_status => raw_node[:admin_op_status]
      }
      node_properties.merge!(external_ref) if external_ref

      # fill in keypair/security groups from the target's iaas properties
      # when the node did not carry its own
      if target
        iaas_properties = target[:iaas_properties]
        node_properties[:keypair] ||= iaas_properties[:keypair]
        # substitute node[:security_group] or node[:security_group_set] with node[:security_groups]
        check_node_security_groups!(node_properties)
        node_properties[:security_groups] ||= iaas_properties[:security_group]
      end

      node_properties.reject!{|k,v|v.nil?}
      ndx_nodes[node_name] = raw_node.merge(:components => Array.new, :node_properties => node_properties)
    end

    ndx_nodes[node_name]
  end
end
-
-
1
# Replace a node-group member's :components entry with just the component
# display names; no-op when the raw node carries no :components.
# (Method name typo 'memeber' is preserved — it is part of the interface.)
def process_node_group_memeber_components(ndx_nodes,raw_node,opts=Hash.new)
  components = raw_node[:components]
  return unless components
  names = components.map { |component| component[:display_name] }
  ndx_nodes[raw_node[:display_name]].merge!(:components => names)
end
-
-
# substitute node[:security_group] or node[:security_group_set] with node[:security_groups]
-
# not deleting any keys just changing the name
-
1
# substitute node[:security_group] or node[:security_group_set] with node[:security_groups]
# not deleting any keys just changing the name
# Normalize legacy keys in place: :security_group wins over
# :security_group_set, which is joined with commas.
def check_node_security_groups!(node_properties)
  if single = node_properties.delete(:security_group)
    node_properties[:security_groups] = single
  else
    set = node_properties.delete(:security_group_set)
    node_properties[:security_groups] = set.join(',') if set
  end
end
-
-
1
# Produce a client-safe copy of a node's external_ref hash. Credentials
# (:secret, :key) are always omitted. With opts[:print_form], :dns_name is
# dropped and a Hash-valued :private_dns_name collapses to its first value.
def node_external_ref_print_form(node_ext_ref,opts=Hash.new)
  ret = node_ext_ref.class.new()
  print_form = opts[:print_form]
  node_ext_ref.each_pair do |key,val|
    next if [:secret,:key].include?(key)
    unless print_form
      ret[key] = val
      next
    end
    next if [:dns_name].include?(key)
    if key == :private_dns_name and val.kind_of?(Hash)
      ret[key] = val.values.first
    else
      ret[key] = val
    end
  end
  ret
end
-
-
1
# Append this row's component to node[:components], rendered as either a
# detailed hash (opts[:component_info]), a name-only hash (when attributes
# are indexed), or a bare type string. Attribute rows indexed by component id
# are merged in when present. Returns node[:components].
def format_components_and_attributes(node,raw_row,ndx_attrs,opts)
  cmp_hash = list_aux__component_template(raw_row)
  # NOTE(review): this parses as cmp_type = (a && a.gsub(...)), so cmp_type is
  # the '::'-normalized type, or nil/false when :component_type is absent
  if cmp_type = cmp_hash[:component_type] && cmp_hash[:component_type].gsub(/__/,"::")
    cmp =
      if opts[:component_info]
        version = ModuleBranch.version_from_version_field(cmp_hash[:version])
        {
          :component_name => cmp_type,
          :component_id => cmp_hash[:id],
          :basic_type => cmp_hash[:basic_type],
          :description => cmp_hash[:description],
          :version => version
        }
      elsif not ndx_attrs.empty?
        {:component_name => cmp_type}
      else
        cmp_type
      end

    if attrs = ndx_attrs[list_aux__component_template(raw_row)[:id]]
      processed_attrs = attrs.map do |attr|
        proc_attr = {:attribute_name => attr[:display_name], :value => attr[:attribute_value]}
        proc_attr[:override] = true if attr[:is_instance_value]
        proc_attr
      end
      # bare-string cmp form cannot carry attributes
      cmp.merge!(:attributes => processed_attrs) if cmp.kind_of?(Hash)
    end
    node[:components] << cmp
  end
  node[:components]
end
-
end
-
end
-
end
-
2
module DTK; class Assembly
  # Assembly::Template models an assembly template: a "composite" component
  # row owned by a project/service module. It can be staged (cloned) into a
  # target to produce an Assembly::Instance, and created/updated back from a
  # running instance via the nested Factory class.
  class Template < self
    r8_nested_require('template','factory')
    r8_nested_require('template','list')
    r8_nested_require('template','pretty_print')
    include PrettyPrint::Mixin
    extend PrettyPrint::ClassMixin

    # Instance-level get: forces the :assembly_template model handle so the
    # class-level get_objs below takes the get_these_objs branch.
    def get_objs(sp_hash,opts={})
      super(sp_hash,opts.merge(:model_handle => model_handle().createMH(:assembly_template)))
    end
    # Class-level get: routes :assembly_template handles through
    # get_these_objs; any other model handle falls back to the superclass.
    def self.get_objs(mh,sp_hash,opts={})
      if mh[:model_name] == :assembly_template
        get_these_objs(mh,sp_hash,opts)
      else
        super
      end
    end

    # Materializes a Template object from an id handle.
    def self.create_from_id_handle(idh)
      idh.create_object(:model_name => :assembly_template)
    end

    # Clones this template into +target+ (inside a Transaction) and returns
    # the resulting Assembly::Instance subclass object.
    # opts: :assembly_name overrides the display name; :service_settings is
    # passed through to the clone.
    # Raises ErrorUsage when the owning service module's workspace branch has
    # not been DSL-parsed.
    def stage(target,opts={})
      service_module = get_service_module()

      # unless is_dsl_parsed = service_module.dsl_parsed?()
      service_module_branch = service_module.get_workspace_module_branch()
      # NOTE(review): is_dsl_parsed is assigned but never read afterwards;
      # only the truthiness check matters here.
      unless is_dsl_parsed = service_module_branch.dsl_parsed?()
        raise ErrorUsage.new("An assembly template from an unparsed service-module ('#{service_module}') cannot be staged")
      end

      # including :description here because it is not a field that gets copied by clone copy processor
      override_attrs = {:description => get_field?(:description)}
      if assembly_name = opts[:assembly_name]
        override_attrs[:display_name] = assembly_name
      end

      clone_opts = {:ret_new_obj_with_cols => [:id,:type]}
      if settings = opts[:service_settings]
        clone_opts.merge!(:service_settings => settings)
      end

      new_assembly_obj = nil
      Transaction do
        new_assembly_obj = target.clone_into(self,override_attrs,clone_opts)
      end

      Assembly::Instance.create_subclass_object(new_assembly_obj)
    end

    # Creates or updates an assembly template from a live assembly instance,
    # creating the service module first if needed, then marks the module's
    # workspace branch as DSL-parsed. Returns the service module.
    def self.create_or_update_from_instance(project, assembly_instance, service_module_name, assembly_template_name, opts = {})
      namespace = opts[:namespace] || Namespace.default_namespace_name
      opts.merge!(:namespace => namespace)

      service_module = Factory.get_or_create_service_module(project, service_module_name, opts)
      Factory.create_or_update_from_instance(assembly_instance, service_module, assembly_template_name, opts)

      service_module_branch = service_module.get_workspace_module_branch()
      service_module_branch.set_dsl_parsed!(true)

      service_module
    end

    ### standard get methods
    # Nodes belonging to this template.
    def get_nodes(opts={})
      self.class.get_nodes([id_handle()],opts)
    end
    # Node rows for the given assembly id handles; [] when none given.
    # opts[:cols] overrides the default column set.
    def self.get_nodes(assembly_idhs,opts={})
      ret = Array.new
      return ret if assembly_idhs.empty?()
      sp_hash = {
        :cols => opts[:cols]||[:id, :group_id, :display_name, :assembly_id],
        :filter => [:oneof, :assembly_id, assembly_idhs.map{|idh|idh.get_id()}]
      }
      node_mh = assembly_idhs.first.createMH(:node)
      get_objs(node_mh,sp_hash)
    end

    # Maps assembly names to template ids within a project/service module.
    # Names whose ref is not found are silently omitted from the result.
    def self.get_ndx_assembly_names_to_ids(project_idh,service_module,assembly_names)
      ndx_assembly_refs = assembly_names.inject(Hash.new){|h,n|h.merge(n => service_module.assembly_ref(n))}
      sp_hash = {
        :cols => [:id,:group_id,:display_name,:ref],
        :filter => [:and,[:eq,:project_project_id,project_idh.get_id],[:oneof,:ref,ndx_assembly_refs.values]]
      }
      assembly_templates = get_objs(project_idh.createMH(:component),sp_hash,:keep_ref_cols => true)
      ndx_ref_ids = assembly_templates.inject(Hash.new){|h,r|h.merge(r[:ref] => r[:id])}
      ndx_assembly_refs.inject(Hash.new) do |h,(name,ref)|
        id = ndx_ref_ids[ref]
        id ? h.merge(name => id) : h
      end
    end

    # Fills in :namespace on each template row (only when not already set)
    # and returns the same array, mutated.
    def self.augment_with_namespaces!(assembly_templates)
      ndx_namespaces = get_ndx_namespaces(assembly_templates)
      assembly_templates.each do |a|
        if namespace = ndx_namespaces[a[:id]]
          a[:namespace] ||= namespace
        end
      end
      assembly_templates
    end

    # indexed by assembly_template id
    def self.get_ndx_namespaces(assembly_templates)
      ret = Hash.new
      return ret if assembly_templates.empty?
      sp_hash = {
        :cols => [:id,:group_id,:display_name,:module_branch_id,:assembly_template_namespace_info],
        :filter => [:oneof,:id,assembly_templates.map{|a|a.id()}]
      }
      mh = assembly_templates.first.model_handle()
      get_objs(mh,sp_hash).inject(Hash.new) do |h,r|
        h.merge(r[:id] => r[:namespace])
      end
    end
    private_class_method :get_ndx_namespaces

    # Service settings attached to this template (component_component_id fk).
    def get_settings(opts={})
      sp_hash = {
        :cols => opts[:cols]||ServiceSetting.common_columns(),
        :filter => [:eq, :component_component_id, id()]
      }
      service_setting_mh = model_handle(:service_setting)
      Model.get_objs(service_setting_mh,sp_hash)
    end

    # Returns the flattened list of augmented component refs across all
    # matching composite components, with component-template info resolved
    # per service module via that module's component module refs.
    # opts: :filter (extra sp filter), :component_module_refs (precomputed),
    # :force_compute_template_id (passed to matching).
    def self.get_augmented_component_refs(mh,opts={})
      sp_hash = {
        :cols => [:id, :display_name,:component_type,:module_branch_id,:augmented_component_refs],
        :filter => [:and, [:eq, :type, "composite"], [:neq, :project_project_id, nil], opts[:filter]].compact
      }
      assembly_rows = get_objs(mh.createMH(:component),sp_hash)

      # look for version contraints which are on a per component module basis
      aug_cmp_refs_ndx_by_vc = Hash.new
      assembly_rows.each do |r|
        component_ref = r[:component_ref]
        unless component_type = component_ref[:component_type]||(r[:component_template]||{})[:component_type]
          Log.error("Component ref with id #{r[:id]}) does not have a component type associated with it")
        else
          # NOTE(review): the guard above computes component_type from the
          # component ref/template, but the call below uses the row's own
          # r[:component_type] — confirm whether these are guaranteed equal.
          service_module_name = service_module_name(r[:component_type])
          pntr = aug_cmp_refs_ndx_by_vc[service_module_name]
          unless pntr
            # lazily fetch module refs once per service module
            component_module_refs = opts[:component_module_refs] || ModuleRefs.get_component_module_refs(mh.createIDH(:model_name => :module_branch, :id => r[:module_branch_id]).create_object())

            pntr = aug_cmp_refs_ndx_by_vc[service_module_name] = {
              :component_module_refs => component_module_refs
            }
          end
          aug_cmp_ref = r[:component_ref].merge(r.hash_subset(:component_template,:node))
          (pntr[:aug_cmp_refs] ||= Array.new) << aug_cmp_ref
        end
      end
      set_matching_opts = Aux.hash_subset(opts,[:force_compute_template_id])
      aug_cmp_refs_ndx_by_vc.each_value do |r|
        r[:component_module_refs].set_matching_component_template_info?(r[:aug_cmp_refs],set_matching_opts)
      end
      aug_cmp_refs_ndx_by_vc.values.map{|r|r[:aug_cmp_refs]}.flatten
    end
    ### end: standard get methods

    # Strips the '__<template>' suffix, leaving the service module name.
    def self.service_module_name(component_type_field)
      component_type_field.gsub(/__.+$/,'')
    end
    private_class_method :service_module_name


    # Delegates listing to the nested List class.
    def self.list(assembly_mh,opts={})
      List.list(assembly_mh,opts)
    end

    # Dispatches info queries; only :components and :nodes are implemented.
    def info_about(about, opts=Opts.new)
      case about
      when :components
        List.list_components(self)
      when :nodes
        List.list_nodes(self)
      else
        raise Error.new("TODO: not implemented yet: processing of info_about(#{about})")
      end
    end

    def self.list_modules(assembly_templates)
      List.list_modules(assembly_templates)
    end


    # Fetches composite component rows, scoped to opts[:project_idh] when
    # given; backfills :version from the module branch when absent.
    def self.get(mh,opts={})
      sp_hash = {
        :cols => opts[:cols] || [:id, :group_id,:display_name,:component_type,:module_branch_id, :description, :service_module],
        :filter => [:and, [:eq, :type, "composite"],
          opts[:project_idh] ? [:eq,:project_project_id,opts[:project_idh].get_id()] : [:neq, :project_project_id,nil],
          opts[:filter]
        ].compact
      }
      ret = get_these_objs(mh,sp_hash,:keep_ref_cols => true)
      ret.each{|r|r[:version] ||= (r[:module_branch]||{})[:version]}
      ret
    end

    # Virtual column to add when listing at a given detail level; nil means
    # no extra column. Only "nodes" is supported so far.
    def self.list_virtual_column?(detail_level=nil)
      if detail_level.nil?
        nil
      elsif detail_level == "nodes"
        :template_stub_nodes
      else
        raise Error.new("not implemented list_virtual_column at detail level (#{detail_level})")
      end
    end


    # Deletes the template's DSL files and model objects, returning the
    # module repo info produced by the DSL deletion.
    def self.delete_and_ret_module_repo_info(assembly_idh)
      # first delete the dsl files
      module_repo_info = ServiceModule.delete_assembly_dsl?(assembly_idh)
      # need to explicitly delete nodes, but not components since node's parents are not the assembly, while component's parents are the nodes
      # do not need to delete port links which use a cascade foreign key
      delete_model_objects(assembly_idh)
      module_repo_info
    end

    def self.delete_model_objects(assembly_idh)
      delete_assemblies_nodes([assembly_idh])
      delete_instance(assembly_idh)
    end

    # Deletes all node rows owned by the given assemblies; [] when none given.
    def self.delete_assemblies_nodes(assembly_idhs)
      ret = Array.new
      return ret if assembly_idhs.empty?
      node_idhs = get_nodes(assembly_idhs).map{|n|n.id_handle()}
      Model.delete_instances(node_idhs)
    end

    # Validates an id belongs to a project-owned composite component.
    def self.check_valid_id(model_handle,id)
      filter =
        [:and,
          [:eq, :id, id],
          [:eq, :type, "composite"],
          [:neq, :project_project_id, nil]]
      check_valid_id_helper(model_handle,id,filter)
    end
    # Resolves a pretty-printed template name (no '/' segments allowed) to
    # its id; raises ErrorNameInvalid for multi-part names.
    def self.name_to_id(model_handle,name)
      parts = name.split("/")
      augmented_sp_hash =
        if parts.size == 1
          {:cols => [:id,:component_type],
            :filter => [:and,
              [:eq, :component_type, pp_name_to_component_type(parts[0])],
              [:eq, :type, "composite"],
              [:neq, :project_project_id, nil]]
          }
        else
          raise ErrorNameInvalid.new(name,pp_object_type())
        end
      name_to_id_helper(model_handle,name,augmented_sp_hash)
    end

    # Finds the service module with the given name AND namespace; nil when
    # no match. (NOTE(review): the local `ret = nil` is dead — the find
    # result is returned directly.)
    def self.get_service_module?(project,service_module_name,namespace)
      ret = nil
      sp_hash = {
        :cols => [:id,:group_id,:display_name,:namespace],
        :filter => [:eq,:display_name,service_module_name]
      }
      get_objs(project.model_handle(:service_module),sp_hash).find{|r|r[:namespace][:display_name] == namespace}
    end

    # TODO: probably move to Assembly
    # Defaults the model handle to :component (templates are component rows).
    def model_handle(mn=nil)
      super(mn||:component)
    end

    private
    # Separator between service module name and template name inside a
    # component_type field, e.g. "mod__tmpl".
    ModuleTemplateSep = '__'

    # returns [service_module_name,assembly_name]
    def self.parse_component_type(component_type)
      component_type.split(ModuleTemplateSep)
    end

    # Inverse of parse_component_type.
    def self.component_type(service_module_name,template_name)
      "#{service_module_name}#{ModuleTemplateSep}#{template_name}"
    end
  end
end
# TODO: hack to get around error in /home/dtk/server/system/model.rb:31:in `const_get
AssemblyTemplate = Assembly::Template
end
-
1
r8_require('../../factory_object_type')
-
1
module DTK
  class Assembly; class Template
    # Factory builds/updates an assembly template (and its nodes, ports,
    # attributes, port links and task templates) from a running assembly
    # instance, persisting both to the model and to the service module repo.
    # Instances are Hash-like (via FactoryObjectMixin) and accumulate the
    # content to persist.
    class Factory < self
      r8_nested_require('factory','non_default_attribute')
      extend FactoryObjectClassMixin
      include FactoryObjectMixin

      # Looks up the service module by name+namespace, creating it when
      # absent. Raises ErrorUsage when a local clone exists without a server
      # copy, or when opts[:mode] == :update and the module is missing.
      # opts[:namespace] is required.
      def self.get_or_create_service_module(project,service_module_name,opts={})
        unless namespace = opts[:namespace]
          raise Error.new("Need to update code so that namespace passed in")
        end
        if service_module = get_service_module?(project,service_module_name,namespace)
          service_module
        else
          raise ErrorUsage.new("Unable to create assembly because service module (#{namespace}:#{service_module_name}) clone exists on local machine but missing from server. You should import service module or delete local clone and try again.") if opts[:local_clone_dir_exists]

          if opts[:mode] == :update
            raise ErrorUsage.new("Service module (#{service_module_name}) does not exist")
          end

          local_params = ModuleBranch::Location::LocalParams::Server.new(
            :module_type => :service_module,
            :module_name => service_module_name,
            :namespace => namespace,
            :version => nil
          )

          # TODO: look to remove :config_agent_type
          module_and_branch_info = ServiceModule.create_module(project,local_params,:config_agent_type => ConfigAgent::Type.default_symbol)
          module_and_branch_info[:module_idh].create_object()
        end
      end

      # creates a new assembly template if it does not exist
      def self.create_or_update_from_instance(assembly_instance, service_module, assembly_name, opts = {})
        assembly_factory = create_assembly_factory(assembly_instance, service_module, assembly_name, opts)
        assembly_factory.raise_error_if_integrity_error()
        assembly_factory.create_assembly_template()
      end

      # Two-phase build: gather clone content from the instance, then persist.
      def create_assembly_template
        add_content_for_clone!()
        create_assembly_template_aux()
      end

      # Stashes the collaborating objects on the factory instance; returns self.
      def set_object_attributes!(project_idh,assembly_instance,service_module,service_module_branch)
        @project_idh = project_idh
        @assembly_instance = assembly_instance
        @service_module = service_module
        @service_module_branch = service_module_branch
        @assembly_component_modules = assembly_instance.get_component_modules(:get_version_info=>true)
        @component_module_refs = service_module.get_component_module_refs()
        self
      end

      def raise_error_if_integrity_error()
        raise_error_if_inconsistent_mod_refs()
      end

      private
      # Raises ErrorUsage when any component module used by the instance has
      # a namespace that differs from the one recorded in the target service
      # module's module refs; the message lists every mismatch.
      def raise_error_if_inconsistent_mod_refs()
        mismatched_cmp_mods = Array.new
        @assembly_component_modules.each do |cmp_mod|
          cmp_mod_name = cmp_mod[:display_name]
          if namespace = @component_module_refs.matching_component_module_namespace?(cmp_mod_name)
            if namespace != cmp_mod[:namespace_name]
              mismatch = {
                :module_name => cmp_mod_name,
                :template_ns => namespace,
                :instance_ns => cmp_mod[:namespace_name]
              }
              mismatched_cmp_mods << mismatch
            end
          end
        end
        unless mismatched_cmp_mods.empty?
          err_msg = "Cannot push to service module (#{@service_module.get_field?(:display_name)}) because the following mismatches in namespaces:\n"
          mismatched_cmp_mods.each do |el|
            err_msg << " Component module (#{el[:module_name]}) in instance has namespace (#{el[:instance_ns]}), but namespace (#{el[:template_ns]}) in service module\n"
          end
          err_msg << "Alternatives are to push to another service module or change the service module's #{ModuleRefs.meta_filename_path()} file"
          raise ErrorUsage.new(err_msg)
        end
      end

      # Returns a Factory wired to either the existing template row or a
      # freshly created composite component row. Raises ErrorUsage when
      # opts[:mode] conflicts with existence (:create + exists, :update +
      # missing). (NOTE(review): defined after `private` but `def self.`
      # makes it a class method, which `private` does not affect.)
      def self.create_assembly_factory(assembly_instance, service_module, assembly_name, opts = {})
        service_module_name = service_module.get_field?(:display_name)
        local_params = ModuleBranch::Location::LocalParams::Server.new(
          :module_type => :service_module,
          :module_name => service_module_name,
          :namespace => service_module.module_namespace(),
          :version => opts[:version]
        )
        service_module_branch = service_module.get_module_branch_from_local_params(local_params)
        project_idh = service_module.get_project().id_handle()

        assembly_mh = project_idh.create_childMH(:component)
        if ret = exists?(assembly_mh,service_module,assembly_name)
          if opts[:mode] == :create
            raise ErrorUsage.new("Assembly (#{assembly_name}) already exists in service module (#{service_module_name})")
          end
          ret.set_object_attributes!(project_idh,assembly_instance,service_module,service_module_branch)
        else
          if opts[:mode] == :update
            raise ErrorUsage.new("Assembly (#{assembly_name}) does not exist in service module (#{service_module_name})")
          end
          assembly_mh = project_idh.create_childMH(:component)
          hash_values = {
            :project_project_id => project_idh.get_id(),
            :ref => service_module.assembly_ref(assembly_name),
            :display_name => assembly_name,
            :type => "composite",
            :module_branch_id => service_module_branch[:id],
            :component_type => Assembly.ret_component_type(service_module_name,assembly_name)
          }
          hash_values.merge!(:description => opts[:description]) if opts[:description]
          ret = create(assembly_mh,hash_values)
          ret.set_object_attributes!(project_idh,assembly_instance,service_module,service_module_branch)
        end
      end

      public
      attr_reader :assembly_instance
      private
      attr_reader :project_idh,:service_module_branch

      # Memoized project URI used when resolving relative refs.
      def project_uri()
        @project_uri ||= @project_idh.get_uri()
      end

      # Collects everything needed to clone the instance into a template:
      # nodes (with components, ports, node-level attributes), port links,
      # assembly-level attributes and task templates — merged onto self.
      # Also populates @ndx_ports and @ndx_nodes used by later content
      # builders. Raises ErrorUsage when the instance has no nodes or no
      # components. Returns self.
      def add_content_for_clone!()
        node_idhs = assembly_instance.get_nodes().map(&:id_handle)
        if node_idhs.empty?
          raise ErrorUsage.new("Cannot find any nodes associated with assembly (#{assembly_instance.get_field?(:display_name)})")
        end

        # 1) get a content object, 2) modify, and 3) persist
        port_links, dangling_links = Node.get_conn_port_links(node_idhs)
        # TODO: raise error to user if dangling link
        Log.error("dangling links #{dangling_links.inspect}") unless dangling_links.empty?

        task_templates = assembly_instance.get_task_templates_with_serialized_content()

        node_scalar_cols = FactoryObject::CommonCols + [:type, :node_binding_rs_id]
        node_mh = node_idhs.first.createMH()
        node_ids = node_idhs.map(&:get_id)

        # get assembly-level attributes
        assembly_level_attrs = assembly_instance.get_assembly_level_attributes().reject do |a|
          a[:attribute_value].nil?
        end

        # get node-level attributes
        ndx_node_level_attrs = {}
        Node.get_node_level_assembly_template_attributes(node_idhs).each do |r|
          (ndx_node_level_attrs[r[:node_node_id]] ||= []) << r
        end

        # get contained ports
        sp_hash = {
          :cols => [:id, :display_name, :ports_for_clone],
          :filter => [:oneof, :id, node_ids]
        }
        @ndx_ports = {}
        node_port_mapping = {}
        Model.get_objs(node_mh, sp_hash, :keep_ref_cols => true).each do |r|
          port = r[:port].merge(:link_def => r[:link_def])
          (node_port_mapping[r[:id]] ||= []) << port
          @ndx_ports[port[:id]] = port
        end

        # get contained components-non-default attribute candidates
        sp_hash = {
          :cols => node_scalar_cols + [:cmps_and_non_default_attr_candidates],
          :filter => [:oneof, :id, node_ids]
        }

        node_cmp_attr_rows = Model.get_objs(node_mh, sp_hash, :keep_ref_cols => true)
        if node_cmp_attr_rows.empty?
          raise ErrorUsage.new('No components in the nodes being grouped to be an assembly template')
        end
        cmp_scalar_cols = node_cmp_attr_rows.first[:component].keys - [:non_default_attr_candidate]
        @ndx_nodes = {}
        node_cmp_attr_rows.each do |r|
          node_id = r[:id]
          @ndx_nodes[node_id] ||=
            r.hash_subset(*node_scalar_cols).merge(
              :components => [],
              :ports => node_port_mapping[node_id],
              :attributes => ndx_node_level_attrs[node_id]
            )
          cmps = @ndx_nodes[node_id][:components]
          cmp_id = r[:component][:id]
          # rows arrive one per (component, attr candidate); group per component
          unless matching_cmp = cmps.find { |cmp| cmp[:id] == cmp_id }
            matching_cmp = r[:component].hash_subset(*cmp_scalar_cols).merge(:non_default_attributes => Array.new)
            cmps << matching_cmp
          end
          if attr_cand = r[:non_default_attr_candidate]
            if non_default_attr = NonDefaultAttribute.isa?(attr_cand, matching_cmp)
              matching_cmp[:non_default_attributes] << non_default_attr
            end
          end
        end
        update_hash = {
          :nodes => @ndx_nodes.values,
          :port_links => port_links,
          :assembly_level_attributes => assembly_level_attrs
        }
        merge!(update_hash)
        merge!(:task_templates => task_templates) unless task_templates.empty?
        self
      end

      # TODO: can collapse above and below; aboves looks like extra intermediate level
      # Turns the gathered content into DBUpdateHash structures, builds the
      # template output (model + repo serialization) and saves it inside a
      # Transaction, also updating component module refs when needed.
      def create_assembly_template_aux
        nodes = self[:nodes].inject(DBUpdateHash.new){ |h, node| h.merge(create_node_content(node)) }
        port_links = self[:port_links].inject(DBUpdateHash.new){ |h, pl| h.merge(create_port_link_content(pl)) }
        task_templates = self[:task_templates].inject(DBUpdateHash.new){ |h, tt| h.merge(create_task_template_content(tt)) }
        assembly_level_attributes = self[:assembly_level_attributes].inject(DBUpdateHash.new){ |h, a| h.merge(create_assembly_level_attributes(a)) }

        # only need to mark as complete if assembly template exists already
        if assembly_template_idh = id_handle_if_object_exists?()
          assembly_template_id = assembly_template_idh.get_id()
          nodes.mark_as_complete({ :assembly_id => assembly_template_id }, :apply_recursively => true)
          port_links.mark_as_complete(:assembly_id => assembly_template_id)
          task_templates.mark_as_complete(:component_component_id => assembly_template_id)
          assembly_level_attributes.mark_as_complete(:component_component_id => assembly_template_id)
        end

        @template_output = ServiceModule::AssemblyExport.create(self, project_idh, service_module_branch)
        assembly_ref = self[:ref]
        assembly_hash = hash_subset(:display_name, :type, :ui, :module_branch_id, :component_type)

        # description = self[:description]||@assembly_instance.get_field?(:description)
        description = self[:description] || self[:display_name]
        assembly_hash.merge!(:description => description) if description

        assembly_hash.merge!(:task_template => task_templates) unless task_templates.empty?
        assembly_hash.merge!(:attribute => assembly_level_attributes) unless assembly_level_attributes.empty?
        assembly_hash.merge!(:port_link => port_links) unless port_links.empty?
        @template_output.merge!(:node => nodes, :component => { assembly_ref => assembly_hash })
        module_refs_updated = @component_module_refs.update_object_if_needed!(@assembly_component_modules)

        Transaction do
          @template_output.save_to_model()
          if module_refs_updated
            @component_module_refs.update() # update the object model
            @component_module_refs.serialize_and_save_to_repo?(:update_module_refs => true)
          end

          # serialize_and_save_to_repo? returns new_commit_sha
          @template_output.serialize_and_save_to_repo?()
        end
      end

      # Returns the existing template row (as a Factory object) for
      # service_module/template_name, or nil. Matches only assembly templates
      # (ancestor_id nil, type "composite") on the module's branches.
      def self.exists?(assembly_mh,service_module,template_name)
        ret = nil
        sp_hash = {
          :cols => [:id,:group_id,:display_name],
          :filter => [:and,[:eq,:service_id,service_module.id()]]
        }
        module_branches = get_objs(service_module.model_handle(:module_branch),sp_hash)
        return ret if module_branches.empty?

        service_module_name = service_module.get_field?(:display_name)
        component_type = component_type(service_module_name,template_name)
        sp_hash = {
          :cols => [:id,:display_name,:group_id,:component_type,:project_project_id,:ref,:ui,:type,:module_branch_id],
          :filter =>
            [:and,
              [:eq, :type, "composite"],
              # Aldin: added ancestor_id==nil check to distinct between service instance (has ancestor_id) and assembly-template
              # with same name (does not have ancestor_id)
              [:eq, :ancestor_id, nil],
              [:eq, :component_type, component_type],
              [:oneof, :module_branch_id, module_branches.map{|r|r.id()}]]
        }
        if row = get_obj(assembly_mh,sp_hash,:keep_ref_cols => true)
          subclass_model(row) # so that what is returned is object of type Assembly::Template::Factory
        end
      end

      # Builds the persistable hash for one port link, with *-prefixed keys
      # holding relative-uri references resolved at save time.
      def create_port_link_content(port_link)
        in_port = @ndx_ports[port_link[:input_id]]
        in_node_ref = node_ref(@ndx_nodes[in_port[:node_node_id]])
        in_port_ref = qualified_ref(in_port)
        out_port = @ndx_ports[port_link[:output_id]]
        out_node_ref = node_ref(@ndx_nodes[out_port[:node_node_id]])
        out_port_ref = qualified_ref(out_port)

        assembly_ref = self[:ref]
        port_link_ref_info = {
          :assembly_template_ref => assembly_ref,
          :in_node_ref => in_node_ref,
          :in_port_ref => in_port_ref,
          :out_node_ref => out_node_ref,
          :out_port_ref => out_port_ref
        }
        port_link_ref = PortLink.port_link_ref(port_link_ref_info)
        port_link_hash = {
          "*input_id" => "/node/#{in_node_ref}/port/#{in_port_ref}",
          "*output_id" => "/node/#{out_node_ref}/port/#{out_port_ref}",
          "*assembly_id" => "/component/#{assembly_ref}"
        }
        {port_link_ref => port_link_hash}
      end

      # Maps relative port refs (with or without leading '/') to their ids,
      # keyed by the '/'-prefixed form.
      def get_ndx_target_port_refs(relative_port_refs_x)
        relative_port_refs = relative_port_refs_x.map{|pr|pr.gsub(/^\//,'')}
        IDInfoTable.get_ndx_ids_matching_relative_uris(@project_idh,project_uri(),relative_port_refs).inject(Hash.new) do |h,(k,v)|
          h.merge("/#{k}" => v)
        end
      end

      def create_task_template_content(task_template)
        ref,create_hash = Task::Template.ref_and_create_hash(task_template[:content],task_template[:task_action])
        {ref => create_hash}
      end

      # Persistable hash for one assembly-level attribute, keyed by name.
      def create_assembly_level_attributes(attr)
        ref = display_name = attr[:display_name]
        create_hash = {
          :display_name => display_name,
          :value_asserted => attr[:attribute_value],
          :data_type => attr[:data_type]||Attribute::Datatype.default()
        }
        {ref => create_hash}
      end

      # Persistable hash for one node, including its component refs, ports
      # and node-level attributes. The 'assembly_wide' node keeps its own
      # type; others get the stub type for node or node group.
      def create_node_content(node)
        node_ref = node_ref(node)
        cmp_refs = node[:components].inject(Hash.new){|h,cmp|h.merge(create_component_ref_content(cmp))}
        ports = (node[:ports]||[]).inject(Hash.new){|h,p|h.merge(create_port_content(p))}
        node_attrs = (node[:attributes]||[]).inject(Hash.new){|h,a|h.merge(create_node_attribute_content(a))}
        node_hash = Aux::hash_subset(node,[:display_name,:node_binding_rs_id])
        node_type =
          if node[:display_name].eql?('assembly_wide')
            'assembly_wide'
          else
            node.is_node_group?() ? Node::Type::NodeGroup.stub : Node::Type::Node.stub
          end
        node_hash.merge!(
          "*assembly_id" => "/component/#{self[:ref]}",
          :type => node_type,
          :component_ref => cmp_refs,
          :port => ports,
          :attribute => node_attrs
        )
        {node_ref => node_hash}
      end

      def create_port_content(port)
        port_ref = qualified_ref(port)
        port_hash = Aux::hash_subset(port,[:display_name,:description,:type,:direction,:link_type,:component_type])
        port_hash.merge!(:link_def_id => port[:link_def][:ancestor_id]) if port[:link_def]
        {port_ref => port_hash}
      end

      def create_node_attribute_content(attr)
        attr_ref = attr[:display_name]
        attr_hash = Aux::hash_subset(attr,[:display_name,:value_asserted,:value_derived,:data_type])
        {attr_ref => attr_hash}
      end

      # Persistable hash for one component ref; non-default attributes are
      # folded in as :attribute_override entries by NonDefaultAttribute.
      def create_component_ref_content(cmp)
        cmp_ref_ref = ComponentRef.ref_from_component_hash(cmp)
        cmp_ref_hash = Aux::hash_subset(cmp,[:display_name,:description,:component_type])
        cmp_template_id = cmp[:ancestor_id]
        cmp_ref_hash.merge!(:component_template_id => cmp_template_id)
        attrs = cmp[:non_default_attributes]
        unless attrs.nil? or attrs.empty?
          NonDefaultAttribute.add_to_cmp_ref_hash!(cmp_ref_hash,self,attrs,cmp_template_id)
        end
        {cmp_ref_ref => cmp_ref_hash}
      end

      # Node ref scoped under this template's ref.
      def node_ref(node)
        assembly_template_node_ref(self[:ref],node[:display_name])
      end
    end
  end; end
end
-
3
module DTK; class Assembly; class Template
  class Factory
    # NonDefaultAttribute is a Hash subclass wrapping an attribute whose
    # value differs from the component default (an instance override) or
    # that carries base tags; such attributes are recorded on the template's
    # component refs as attribute overrides.
    class NonDefaultAttribute < ::Hash
      attr_reader :is_title_attribute
      # Copies the relevant attribute fields into self. is_title_attribute
      # is true only for title attributes of components that can appear more
      # than once per node.
      def initialize(attr,cmp)
        super()
        replace(Aux::hash_subset(attr,[:display_name,:description,:ref,:tags,:is_instance_value]))
        self[:attribute_value] = attr[:attribute_value] # virtual attributes do not work in Aux::hash_subset
        @is_title_attribute = ((not cmp[:only_one_per_node]) and attr.is_title_attribute?())
      end

      # Returns a NonDefaultAttribute when attr qualifies (value override or
      # base tags), nil otherwise. NOTE(review): base_tags? also reifies
      # attr[:tags] in place as a side effect.
      def self.isa?(attr,cmp)
        if isa_value_override?(attr) or !!base_tags?(attr)
          new(attr,cmp)
        end
      end

      def isa_value_override?()
        self.class.isa_value_override?(self)
      end
      # True when the attribute has an instance-level, non-nil value.
      def self.isa_value_override?(attr)
        attr[:is_instance_value] and !attr[:attribute_value].nil?
      end

      def base_tags?()
        self.class.base_tags?(self)
      end
      # Reifies attr[:tags] (assignment is intentional) and returns its base
      # tags when present; falsy when there are no tags.
      def self.base_tags?(attr)
        if attr[:tags] = HierarchicalTags.reify(attr[:tags])
          attr[:tags].base_tags?()
        end
      end

      # Adds :attribute_override entries to cmp_ref_hash for each
      # non-default attribute, resolving each against the base component
      # template's attribute rows (for id/data type). Raises ErrorUsage when
      # an attribute is missing on the base component — usually meaning the
      # component module needs push-module-updates.
      def self.add_to_cmp_ref_hash!(cmp_ref_hash,factory,non_def_attrs,cmp_template_id)
        attr_names = non_def_attrs.map{|a|a[:display_name]}
        sp_hash = {
          :cols => [:id,:display_name,:data_type,:semantic_data_type],
          :filter => [:and,[:eq,:component_component_id,cmp_template_id],[:oneof,:display_name,attr_names]]
        }
        ndx_attrs = Model.get_objs(factory.model_handle(:attribute),sp_hash).inject(Hash.new) do |h,r|
          h.merge(r[:display_name] => r)
        end
        attr_override = cmp_ref_hash[:attribute_override] = Hash.new
        non_def_attrs.each do |non_def_attr|
          if attribute_template = ndx_attrs[non_def_attr[:display_name]]
            non_def_attr[:attribute_template_id] = attribute_template[:id]
            non_def_attr.merge!(Aux::hash_subset(attribute_template,[:data_type,:semantic_data_type]))
          else
            component_type = Component.display_name_print_form(cmp_ref_hash[:component_type])
            module_name = Component.module_name(cmp_ref_hash[:component_type])
            raise ErrorUsage.new("Attribute (#{non_def_attr[:display_name]}) does not exist in base component (#{component_type}); you may need to invoke push-module-updates #{module_name}")
          end
          attr_override[non_def_attr[:ref]] = non_def_attr
        end
      end
    end
  end
end
end; end; end
-
1
module DTK
  class Assembly; class Template
    # List produces client-facing listings of assembly templates, their
    # components and their nodes, at varying detail levels.
    class List < self
      # Lists assembly templates. opts[:detail_level] of "attributes" adds
      # component attribute rows; otherwise the simple listing is produced
      # (with nodes when detail_level == "nodes").
      def self.list(assembly_mh,opts={})
        assembly_mh = assembly_mh.createMH(:assembly_template) # to insure right mh type
        opts = opts.merge(:cols => [:id, :group_id,:display_name,:component_type,:module_branch_id,:service_module,list_virtual_column?(opts[:detail_level])].compact)
        assembly_rows = get(assembly_mh,opts)
        if opts[:detail_level] == "attributes"
          attr_rows = get_component_attributes(assembly_mh,assembly_rows)
          list_aux(assembly_rows,attr_rows,opts)
        else
          list_aux__simple(assembly_rows,opts)
        end
      end

      # Flattened component lists across the given templates.
      def self.list_modules(assembly_templates)
        components = []
        assembly_templates.each do |assembly|
          components << assembly.info_about(:components)
        end
        components.flatten
      end

      # Components of one template, each as {:id, :display_name, :version},
      # display_name prefixed by node name (empty for assembly-wide node),
      # sorted by display_name.
      def self.list_components(assembly_template)
        sp_hash = {
          :filter => [:eq,:id,assembly_template.id()]
        }
        mh = assembly_template.model_handle
        aug_component_refs = get_augmented_component_refs(mh,sp_hash)
        aug_component_refs.map do |r|
          cmp_template = r[:component_template]
          node_name = r[:node].is_assembly_wide_node?() ? '' : "#{r[:node][:display_name]}/"
          display_name = "#{node_name}#{r.display_name_print_form()}"
          version = ModuleBranch.version_from_version_field(cmp_template[:version])
          cmp_template.hash_subset(:id).merge(:display_name => display_name, :version => version)
        end.sort{|a,b|a[:display_name] <=> b[:display_name]}
      end

      # Nodes of one template, sorted by display_name. Stub types are mapped
      # to client-facing 'node'/'node_group'; assembly-wide nodes are marked
      # hidden; node-binding fields (template name, os type, common fields
      # that do not vary) are merged in when a binding exists.
      def self.list_nodes(assembly_template)
        sp_hash = {:cols => [:node_templates]}
        assembly_template.get_objs(sp_hash).map do |r|
          el = r[:node].hash_subset(:id,:display_name)
          el[:dtk_client_hidden] = el.is_assembly_wide_node?()
          case r[:node][:type]
          when 'node_group_stub'
            el.merge!(:type => 'node_group')
          when 'stub'
            el.merge!(:type => 'node')
          end
          if binding = r[:node_binding]
            binding_fields = binding.hash_subset(:os_type,{:display_name => :template_name})
            common_fields = binding.ret_common_fields_or_that_varies()
            common_fields_to_add = Aux::hash_subset(common_fields,[{:type => :template_type},:image_id,:size,:region]).reject{|k,v|v == :varies}
            binding_fields.merge!(common_fields_to_add)
            el.merge!(binding_fields)
          end
          el
        end.sort{|a,b|a[:display_name] <=> b[:display_name]}
      end

      private
      # NOTE(review): `private` has no effect on `def self.` class methods;
      # the methods below are effectively public. private_class_method would
      # be needed to actually hide them — left as-is to avoid breaking any
      # external callers.
      # Default component attributes with any per-template overrides applied
      # (:is_instance_value marks overridden rows).
      def self.get_component_attributes(assembly_mh,template_assembly_rows,opts={})
        # get attributes on templates (these are defaults)
        ret = get_default_component_attributes(assembly_mh,template_assembly_rows,opts)

        # get attribute overrides
        sp_hash = {
          :cols => [:id,:display_name,:attribute_value,:attribute_template_id],
          :filter => [:oneof, :component_ref_id,template_assembly_rows.map{|r|r[:component_ref][:id]}]
        }
        attr_override_rows = Model.get_objs(assembly_mh.createMH(:attribute_override),sp_hash)
        unless attr_override_rows.empty?
          ndx_attr_override_rows = attr_override_rows.inject(Hash.new) do |h,r|
            h.merge(r[:attribute_template_id] => r)
          end
          ret.each do |r|
            if override = ndx_attr_override_rows[r[:id]]
              r.merge!(:attribute_value => override[:attribute_value], :is_instance_value => true)
            end
          end
        end
        ret
      end

      # Simple listing: one entry per template (deduped by id) with pretty
      # name, namespace, version and — at "nodes" detail — node summaries.
      # Sorted by display_name unless opts[:no_sorting].
      def self.list_aux__simple(assembly_rows,opts={})
        ndx_ret = Hash.new
        if opts[:detail_level] == "components"
          raise Error.new("list assembly templates at component level not treated")
        end
        include_nodes = ["nodes"].include?(opts[:detail_level])
        pp_opts = Aux.hash_subset(opts,[:no_module_prefix,:version_suffix])
        assembly_rows.each do |r|
          # TODO: hack to create a Assembly object (as opposed to row which is component); should be replaced by having
          # get_objs do this (using possibly option flag for subtype processing)
          pntr = ndx_ret[r[:id]] ||= r.id_handle.create_object().merge(:display_name => pretty_print_name(r,pp_opts),:ndx_nodes => Hash.new)
          pntr.merge!(:module_branch_id => r[:module_branch_id]) if r[:module_branch_id]
          # TODO: should replace with something more robust to find namespace
          if namespace = Namespace.namespace_from_ref?(r[:service_module][:ref])
            pntr.merge!(:namespace => namespace)
          end

          if version = pretty_print_version(r)
            pntr.merge!(:version => version)
          end
          next unless include_nodes
          node_id = r[:node][:id]
          unless node = pntr[:ndx_nodes][node_id]
            node = pntr[:ndx_nodes][node_id] = {
              :node_name => r[:node][:display_name],
              :node_id => node_id
            }
            node[:external_ref] = r[:node][:external_ref] if r[:node][:external_ref]
            node[:os_type] = r[:node][:os_type] if r[:node][:os_type]
          end
        end

        unsorted = ndx_ret.values.map do |r|
          el = r.slice(:id,:display_name,:module_branch_id,:version,:namespace)
          include_nodes ? el.merge(:nodes => r[:ndx_nodes].values) : el
        end
        opts[:no_sorting] ? unsorted : unsorted.sort{|a,b|a[:display_name] <=> b[:display_name]}
      end

    end
  end
end; end
-
1
module DTK
  class Assembly; class Template
    module PrettyPrint
      # Instance-side pretty printing for assembly templates.
      module Mixin
        def display_name_print_form(opts={})
          if opts.empty?
            # TODO: may be able to get rid of this clause
            Template.pp_display_name(get_field?(:component_type))
          else
            Template.pretty_print_name(self,opts)
          end
        end
      end

      module ClassMixin
        PPModuleTemplateSep = '::'
        PPServiceModuleAssemblyDelim = '/'

        # component_type -> pretty-print name (internal separator becomes '::')
        def pp_display_name(component_type)
          component_type.gsub(Regexp.new(Template::ModuleTemplateSep),PPModuleTemplateSep)
        end

        # Inverse of pp_display_name
        def pp_name_to_component_type(pp_name)
          pp_name.gsub(Regexp.new(PPModuleTemplateSep),Template::ModuleTemplateSep)
        end

        # Pretty-print name, optionally qualified by the service module name.
        def pretty_print_name(assembly_template,opts={})
          assembly_name,module_name = pretty_print_module_and_assembly(assembly_template,opts)
          if opts[:no_module_prefix] or module_name.nil?
            assembly_name
          elsif opts[:service_module_context_path]
            "#{module_name}/assembly/#{assembly_name}"
          else
            "#{module_name}#{PPServiceModuleAssemblyDelim}#{assembly_name}"
          end
        end

        # returns [assembly_template_name,module_name] in pretty print form
        def pretty_print_module_and_assembly(assembly_template,opts={})
          assembly_name = module_name = nil
          if cmp_type = assembly_template.get_field?(:component_type)
            split = cmp_type.split(Template::ModuleTemplateSep)
            if split.size == 2
              module_name, assembly_name = split
            end
          end
          assembly_name ||= assembly_template.get_field?(:display_name) # heuristic

          if opts[:version_suffix]
            if version = pretty_print_version(assembly_template)
              # BUGFIX: was `assembly_name << "-v#{version}"`, which mutated the
              # string returned by get_field?(:display_name) in place, appending
              # the suffix to the stored field on every call; build a new string.
              assembly_name = "#{assembly_name}-v#{version}"
            end
          end
          if opts[:include_namespace]
            module_name = add_namespace_name(module_name,assembly_template)
          end
          [assembly_name,module_name]
        end

        # Prepends the template's namespace (string or hash form) to module_name
        # when available; logs and returns module_name unchanged otherwise.
        def add_namespace_name(module_name,assembly_template)
          namespace_name = nil
          if namespace = assembly_template[:namespace]
            if namespace.kind_of?(String)
              namespace_name = namespace
            elsif namespace.kind_of?(Hash)
              namespace_name = namespace[:display_name]
            else
              raise Error.new("assembly_template[:namespace] is unexpected type")
            end
          end

          if namespace_name
            module_name && Namespace.join_namespace(namespace_name, module_name)
          else
            Log.error("Unexpected that opts[:include_namespace] is true and no namespace object in assembly")
            module_name
          end
        end
      end
    end
  end; end
end
-
-
-
# TODO: move files to inside DTK::Attribute
-
1
files =
-
[
-
'dependency_analysis',
-
'group',
-
'complex_type'
-
]
-
1
r8_nested_require('attribute',files)
-
1
module DTK
-
1
class Attribute < Model
-
1
set_relation_name(:attribute,:attribute)
-
-
1
r8_nested_require('attribute','get_method')
-
1
r8_nested_require('attribute','meta')
-
1
r8_nested_require('attribute','datatype')
-
1
r8_nested_require('attribute','propagate_changes')
-
1
r8_nested_require('attribute','pattern')
-
1
r8_nested_require('attribute','legal_value')
-
1
r8_nested_require('attribute','special_processing')
-
1
r8_nested_require('attribute','constant')
-
1
r8_nested_require('attribute','print_form')
-
1
r8_nested_require('attribute','semantic_datatype')
-
1
r8_nested_require('attribute','dangling_links_class_mixin')
-
1
r8_nested_require('attribute','update_derived_values')
-
-
1
include GetMethod::Mixin
-
1
extend GetMethod::ClassMixin
-
1
include AttributeGroupInstanceMixin
-
1
include DatatypeMixin
-
1
extend AttrDepAnalaysisClassMixin
-
1
extend AttributeGroupClassMixin
-
1
include ConstantMixin
-
1
include PrintFormMixin
-
1
extend PrintFormClassMixin
-
1
extend PropagateChangesClassMixin
-
1
extend MetaClassMixin
-
1
extend DanglingLinksClassMixin
-
-
1
# Legal attribute display names: alphanumerics, underscore, dot, dash and
# bracketed indexes (e.g. "ports[0]").
LegalDisplayName = /^[a-zA-Z0-9_\[\]\.-]+$/

# Columns fetched for most attribute queries.
def self.common_columns()
  [:id,:display_name,:group_id,:hidden,:description,:component_component_id,:value_derived,:value_asserted,:semantic_data_type,:semantic_type,:semantic_type_summary,:data_type,:required,:dynamic,:cannot_change,:port_type_asserted,:is_port,:external_ref,:read_only,:tags]
end

# Match offset (an Integer) when display_name is legal, nil otherwise.
def self.legal_display_name?(display_name)
  display_name =~ LegalDisplayName
end

# Field used as an attribute title when none is asserted.
def self.default_title_field()
  'name'
end
-
-
-
# TODO: may make this a real field in attribute
# Returns this attribute's value when it serves as the title attribute, else nil.
def title()
  self[:attribute_value] if is_title_attribute?()
end

# True when this attribute names its component instance: either its
# display_name is literally "name" or its external_ref marks it as a title.
def is_title_attribute?()
  get_field?(:display_name) == "name" or ext_ref_indicates_title?(get_field?(:external_ref))
end

# Whether a puppet external_ref whose :path ends in "[name]" marks this as a
# title attribute.
# BUGFIX: guards against a nil ext_ref — get_field?(:external_ref) can return
# nil, which previously raised NoMethodError on ext_ref[:type].
def ext_ref_indicates_title?(ext_ref)
  ret =
    if ext_ref and ext_ref[:type] == "puppet_attribute"
      if path = ext_ref[:path]
        path =~ /\[name\]$/
      end
    end
  !!ret
end
private :ext_ref_indicates_title?
-
-
1
# Maps the attribute's external_ref type to a config agent name
# ("chef"/"puppet"); nil when absent or unrecognized.
def config_agent_type()
  case (self[:external_ref] || {})[:type]
  when "chef_attribute" then "chef"
  when "puppet_attribute" then "puppet"
  end
end

# True when this attribute should be omitted from listings: hidden attributes,
# read-only ones when an editable view was requested, or ones sharing no base
# tags with opts[:tags].
def filter_when_listing?(opts={})
  return true if self[:hidden]
  return true if opts[:editable] and is_readonly?
  if filter_base_tags = opts[:tags]
    shared = (base_tags?() || []) & filter_base_tags.map { |t| t.to_sym }
    return shared.empty?
  end
  false
end

# assume this is called when :tags is pulled from db
# Reifies self[:tags] in place and returns its base tags; nil when no tags.
def base_tags?()
  reified = HierarchicalTags.reify(self[:tags])
  self[:tags] = reified
  reified.base_tags?() if reified
end
-
-
1
# Creates or updates an attribute child of parent from a field-definition hash.
# field_def must include 'display_name'; data types default from SemanticDatatype.
def self.create_or_modify_field_def(parent,field_def)
  attr_mh = parent.model_handle.create_childMH(:attribute)
  attr_hash = Aux::hash_subset(field_def,CreateFields)
  raise Error.new("display_name required in field_def") unless attr_hash[:display_name]
  attr_hash[:ref] = attr_hash[:display_name]
  attr_hash[:semantic_data_type] ||= SemanticDatatype.default().to_s
  attr_hash[:data_type] ||= SemanticDatatype.datatype(attr_hash[:semantic_data_type]).to_s
  # TODO: may use a method rather than below that is more efficient; below returns all children rather than a filtered search
  Model.modify_children_from_rows(attr_mh,parent.id_handle,[attr_hash],[:ref],:update_matching => true,:no_delete => true)
end
# Maps external field_def keys (strings) to internal attribute column symbols
CreateFields = [:display_name,:data_type,:dynamic,:required,:semantic_data_type].map { |sym| { sym.to_s => sym } } + [{ 'default' => :value_asserted }]
-
-
-
# TODO: collapse this and the 4 fields used here
# True when this attribute cannot be edited: input ports, read_only, dynamic,
# or cannot_change attributes.
# NOTE(review): 'VirtulaDependency' looks misspelled but matches the class name
# used elsewhere in the codebase — confirm before renaming.
def is_readonly?()
  fields = VirtulaDependency.port_type() + [:read_only,:dynamic,:cannot_change]
  update_object!(*fields)
  self[:port_type] == "input" or self[:read_only] or self[:dynamic] or self[:cannot_change]
end

# Asserted value wins over derived.
def attribute_value()
  self[:value_asserted] || self[:value_derived]
end

# Semantic-type wrapper object for this attribute.
def semantic_type_object()
  SemanticType.create_from_attribute(self)
end
-
-
# TODO: modify these so dont look up AttributeSemantic
# Whether this port attribute is external; nil when not a port or undeterminable.
def port_is_external()
  return self[:is_external] unless self[:is_external].nil?
  return nil unless self[:is_port] and self[:semantic_type_summary]
  (AttributeSemantic::Info[self[:semantic_type_summary]] || {})[:external]
end

# TODO: modify these so dont look up AttributeSemantic
# Direction of this port ("input"/"output"): asserted value wins, dynamic
# attributes are outputs; nil when not a port or undeterminable.
def port_type()
  asserted = self[:port_type_asserted]
  return asserted unless asserted.nil?
  return nil unless self[:is_port]
  return "output" if self[:dynamic]
  summary = self[:semantic_type_summary]
  return nil unless summary
  (AttributeSemantic::Info[summary] || {})[:port_type]
end

# Three-valued: true when unset, false when set, nil when undeterminable
# (json values are inspected for required fields via their semantic type).
def is_unset()
  # care must be taken so this is three-valued
  return true if attribute_value().nil?
  return false unless self[:data_type] == "json"
  return nil unless self[:semantic_type]
  has_req = AttributeComplexType.has_required_fields_given_semantic_type?(attribute_value(),self[:semantic_type])
  return nil if has_req.nil?
  !has_req
end

# FOR_AMAR
# Returns attributes (augmented with :component and :node) for every component
# appearing in the state-change list.
def self.aug_attr_list_from_state_change_list(state_change_list)
  # index one representative state change per component id
  ndx_scs = Hash.new
  state_change_list.each do |node_change_list|
    node_change_list.each { |sc| ndx_scs[sc[:component][:id]] ||= sc }
  end
  return Array.new if ndx_scs.empty?

  sp_hash = {
    :cols => [:id,:group_id,:display_name,:component_component_id,:attribute_value,:required,:dynamic],
    :filter => [:oneof,:component_component_id, ndx_scs.keys]
  }
  attr_mh = state_change_list.first.first[:component].model_handle(:attribute)
  attrs = get_objs(attr_mh,sp_hash)
  attrs.each do |attr|
    sc = ndx_scs[attr[:component_component_id]]
    attr.merge!(:component => sc[:component], :node => sc[:node])
  end
  attrs
end

# Persists and caches an asserted value on this attribute.
def set_attribute_value(attribute_value)
  # unless SemanticDatatype.is_valid?(self[:semantic_data_type],attribute_value)
  #  raise ErrorUsage.new("The value (#{value.inspect}) is not of type (#{semantic_data_type})")
  # end
  update(:value_asserted => attribute_value)
  self[:value_asserted] = attribute_value
end
-
-
1
# Flattened attributes for all component actions of task, each merged with its
# :component, :node, and :task_id. With opts[:include_node_attributes], also
# appends required node-level attributes for every node the task touches.
def self.augmented_attribute_list_from_task(task,opts={})
  component_actions = task.component_actions
  ret = Array.new
  ndx_nodes = Hash.new
  component_actions.each do |action|
    # complex (json) attributes are expanded into one entry per leaf value
    AttributeComplexType.flatten_attribute_list(action[:attributes],:flatten_nil_value=>true).each do |attr|
      ret << attr.merge(:component => action[:component], :node => action[:node],:task_id => task[:id])
    end
    if opts[:include_node_attributes]
      node = action[:node]
      ndx_nodes[node[:id]] ||= node
    end
  end
  if opts[:include_node_attributes]
    # TODO: none need flattening now
    # adding any nodes that are only node_level
    task.node_level_actions().each do |action|
      node = action[:node]
      ndx_nodes[node[:id]] ||= node
    end
    node_idhs = ndx_nodes.values.map{|n|n.id_handle()}
    # only required node-level attributes are included
    add_filter = [:eq,:required,true]
    cols = [:id,:group_id,:display_name,:node_node_id,:required,:value_derived,:value_asserted,:dynamic,:port_type_asserted,:is_port,:semantic_type_summary]
    Node.get_node_level_attributes(node_idhs,:cols => cols,:add_filter => add_filter).each do |attr|
      ret << attr.merge(:node => ndx_nodes[attr[:node_node_id]],:task_id => task[:id])
    end
  end
  ret
end
-
-
1
# Qualified id without a node/group prefix.
def unraveled_attribute_id()
  qualified_attribute_id_aux()
end

# TODO: may deprecate below
def qualified_attribute_name_under_node()
  qualified_attribute_name_aux()
end

def qualified_attribute_id_under_node()
  qualified_attribute_id_aux()
end

# Fully qualified name, prefixed with the node's display name when present.
def qualified_attribute_name()
  node_name = self[:node][:display_name] if self.has_key?(:node)
  qualified_attribute_name_aux(node_name)
end

# URI from the attribute's :id_info, if any.
def id_info_uri()
  (self[:id_info] || {})[:uri]
end
-
-
#######################
######### Model apis
# Model-api entry point; delegates to update_attribute_def.
def self.update_from_hash_assignments(id_handle,hash,opts={})
  update_attribute_def(id_handle,hash,opts)
end
# Converts an attribute-def hash to internal column form, augmented with the
# attribute's config agent and parent component type.
# NOTE(review): this only returns the internal form — the persistence call
# below is commented out, so no update is actually performed; confirm intent.
def self.update_attribute_def(id_handle,hash,opts={})
  attr = id_handle.create_object().update_object!(:config_agent_type,:component_parent)
  aug_hash = hash.merge(:config_agent_type => attr[:config_agent_type], :component_type => attr[:component_parent][:component_type])
  internal_form = attr_def_to_internal_form(aug_hash)
  internal_form
  # Model.update_from_hash_assignments(id_handle,internal_form,opts)
end
-
-
-
1
# Human-readable path of this attribute under its component.
def print_path(component)
  "cmp[#{component[:display_name].gsub('__','::')}]/#{self[:display_name]}"
end

#=============
# Marks both endpoints of newly created external attribute links as external ports.
def self.update_port_info(attr_mh,attr_link_rows_created)
  port_rows = Array.new
  attr_link_rows_created.each do |row|
    # TODO: the row[:type].nil? test needs to be changed if attribute link type default is no longer "external"
    next unless row[:type].nil? or row[:type] == "external"
    [["input",row[:input_id]],["output",row[:output_id]]].each do |(dir,id)|
      port_rows << {:id => id, :port_type_asserted => dir, :is_port => true, :is_external => true}
    end
  end
  update_from_rows(attr_mh,port_rows) unless port_rows.empty?
end

# True when this attribute is required but has no value and no way to get one
# (not dynamic; input ports must also lack an incoming link).
def required_unset_attribute?()
  # port_type depends on :port_type_asserted,:is_port,:semantic_type_summary and :dynamic
  update_object!(:required,:value_derived,:value_asserted,:port_type_asserted,:is_port,:semantic_type_summary,:dynamic)
  if self[:required] and self[:attribute_value].nil? and not self[:dynamic]
    self[:port_type] == "input" ? !has_input_link?() : true
  end
end

private
# Whether any attribute link feeds this attribute.
def has_input_link?()
  sp_hash = {
    :cols => [:id],
    :filter => [:eq,:input_id,id()]
  }
  !get_obj(model_handle(:attribute_link),sp_hash).empty?
end

# Converts an external attribute-def hash into internal column form.
def self.attr_def_to_internal_form(hash)
  ret = Hash.new
  [:required,:id].each { |k| ret[k] = hash[k] if hash.has_key?(k) }
  ret[:display_name] = hash[:field_name] if hash.has_key?(:field_name)
  Datatype.attr_def_to_internal_form(hash).each { |k,v| ret[k] = v }
  ret[:external_ref] = attr_def_to_internal_form__external_ref(hash)
  ret[:value_asserted] = hash[:default_info] if hash.has_key?(:default_info)
  ret
end

# Asks the relevant config agent for this field's external_ref.
def self.attr_def_to_internal_form__external_ref(hash)
  ConfigAgent.load(hash[:config_agent_type]).ret_attribute_external_ref(:component_type => hash[:component_type], :field_name => hash[:field_name])
end

#####################
public

### object processing and access functions
# Serialized, human-readable qualified name: [node-or-group]/component/attr tokens.
def qualified_attribute_name_aux(node_or_group_name=nil)
  cmp_name = self[:component][:display_name] if self.has_key?(:component)
  # strip what will be recipe name
  cmp_el = cmp_name && cmp_name.gsub(/::.+$/,"")
  tokens = ([node_or_group_name,cmp_el] + Aux.tokenize_bracket_name(self[:display_name])).compact
  AttributeComplexType.serialze(tokens)
end

# Serialized qualified id: typed container ids plus any complex-value item path.
def qualified_attribute_id_aux(node_or_group_id_formatted=nil)
  cmp_id = self[:component][:id] if self.has_key?(:component)
  tokens = [
    node_or_group_id_formatted,
    AttributeComplexType.container_id(:component,cmp_id),
    AttributeComplexType.container_id(:attribute,self[:id])
  ] + (AttributeComplexType.item_path_token_array(self) || [])
  AttributeComplexType.serialze(tokens.compact)
end

# Walks an indexable value along path; nil when the value cannot be indexed.
def self.unravelled_value(val,path)
  return nil unless Aux.can_take_index?(val)
  path.size == 1 ? val[path.first] : unravelled_value(val[path.first],path[1..-1])
end
-
-
1
public

# Creates a derived "sap__l4" (layer-4 service access point) attribute on the
# component referenced by cmp_id_handle. The new attribute's value is the
# cartesian product of the component's sap_config__l4 entries and
# ipv4_host_addresses. Returns [sap_config_attr_idh, new_sap_attr_idh], or nil
# when the component has no sap_config__l4 attribute.
def self.create_needed_l4_sap_attributes(cmp_id_handle,ipv4_host_addresses)
  # TODO: cleanup to use newer model access fns
  component_id = cmp_id_handle.get_id()
  field_set = Model::FieldSet.new(:component,[:id,:display_name,:attributes])
  # TODO: allowing feature in until nest features in base services filter = [:and, [:eq, :component__id, component_id],[:eq, :basic_type,"service"]]
  filter = [:and, [:eq, :component__id, component_id]]
  global_wc = {:attribute__semantic_type_summary => "sap_config__l4"}
  ds = SearchObject.create_from_field_set(field_set,cmp_id_handle[:c],filter).create_dataset().where(global_wc)

  # should only be one attribute matching (or none)
  component = ds.all.first
  sap_config_attr = (component||{})[:attribute]
  return nil unless sap_config_attr
  sap_config_attr_idh = cmp_id_handle.createIDH(:guid => sap_config_attr[:id],:model_name => :attribute, :parent_model_name => :component)

  # cartesian product of sap_config(s) and host addresses
  new_sap_value_list = Array.new
  # TODO: if graph converted hashed values into Model types then could just do sap_config_attr[:attribute_value]
  values = sap_config_attr[:value_asserted]||sap_config_attr[:value_derived]
  # values can be hash or array; determine by looking at semantic_type
  # TODO: may use instead look up from semantic type
  values = [values] unless values.kind_of?(Array)
  values.each do |sap_config|
    ipv4_host_addresses.each do |ipv4_addr|
      new_sap_value_list << sap_config.merge(:host_address => ipv4_addr)
    end
  end

  # derive a description from the component display name
  description_prefix = (component[:display_name]||"").split("::").map{|x|x.capitalize}.join(" ")
  description = description_prefix.empty? ? "Service Access Point" : "#{description_prefix} SAP"

  new_sap_attr_rows =
    [{
      :ref => "sap__l4",
      :display_name => "sap__l4",
      :component_component_id => component_id,
      :value_derived => new_sap_value_list,
      :is_port => true,
      :hidden => true,
      :data_type => "json",
      :description => description,
      # TODO: need the => {"application" => service qualification)
      :semantic_type => {":array" => "sap__l4"},
      :semantic_type_summary => "sap__l4"
    }]

  attr_mh = sap_config_attr_idh.createMH()
  new_sap_attr_idh = create_from_rows(attr_mh,new_sap_attr_rows, :convert => true).first

  [sap_config_attr_idh,new_sap_attr_idh]
end
-
-
###################################################################
##TODO: need to go over each one below to see what we still should use

# For every attribute link feeding this attribute, derive the link function
# (when determinable) and record it on this attribute.
def check_and_set_derived_relation!()
  ingress = Model.get_objects(ModelHandle.new(id_handle[:c],:attribute_link),:output_id => self[:id])
  return nil if ingress.nil?
  ingress.each do |input_obj|
    fn = AttributeLink::ret_function_if_can_determine(input_obj,self)
    check_and_set_derived_rel_from_link_fn!(fn)
  end
end

# sets this attribute derived relation from fn given as input; if error throws trap
# TBD: may want to pass in more context about input so that can set fn
def check_and_set_derived_rel_from_link_fn!(fn)
  return nil if fn.nil?
  raise Error.new("mismatched link") unless self[:function].nil?
  update(:function => fn)
  nil
end

### virtual column defs
# returns asserted first then derived

# True when the value (or any element of an array value) is missing; nil when
# not an array-typed attribute or the value shape is unexpected.
def unknown_in_attribute_value()
  attr_value = attribute_value()
  return true if attr_value.nil?
  return nil unless self[:is_array]
  return nil unless attr_value.kind_of?(Array) # TBD: this should be error
  attr_value.any? { |v| v.nil? } ? true : nil
end

# Pairs of {:node,:component} this attribute is associated with via its parent.
def assoc_components_on_nodes()
  parent_obj = get_parent_object()
  return [] if parent_obj.nil?
  case parent_obj.relation_type
  when :node
    Array.new
  when :component
    parent_obj.get_objects_associated_nodes().map { |n| { :node => n, :component => parent_obj } }
  else
    raise Error.new("unexpected parent of attribute")
  end
end
-
end
-
end
-
-
-
1
module XYZ
-
1
class DerivedValueFunction
-
1
class << self
-
1
def sap_from_config_and_ip(ip_addr,sap_config)
-
# TBD: stub; ignores config constraints on sap_config
-
return nil if ip_addr.nil? or sap_config.nil?
-
port = sap_config[:network] ? sap_config[:network][:port] : nil
-
return nil if port.nil?
-
{
-
:network => {
-
:port => port,
-
:addresses => [ip_addr]
-
}
-
}
-
end
-
-
1
def sap_ref_from_sap(sap)
-
return nil if sap.nil?
-
# TBD: stubbed to only handle limited cases
-
raise Error::NotImplemented.new("sap to sap ref function where not type 'network'") unless sap[:network]
-
raise Error.new("network sap missing port number") unless sap[:network][:port]
-
raise Error.new("network sap missing addresses") unless sap[:network][:addresses]
-
raise Error::NotImplemented.new("saps with multiple IP addresses") unless sap[:network][:addresses].size == 1
-
{:network => {
-
:port => sap[:network][:port],
-
:address => sap[:network][:addresses][0]
-
}
-
}
-
end
-
end
-
end
-
end
-
1
module XYZ
-
1
module AttributeComplexType
-
1
# Three-valued: whether obj supplies the required fields of semantic_type's
# schema; nil when no schema can be derived.
def self.has_required_fields_given_semantic_type?(obj,semantic_type)
  pattern = SemanticTypeSchema.create_from_semantic_type(semantic_type)
  pattern ? has_required_fields?(obj,pattern) : nil
end

# Expands json-typed attributes into one flattened attribute per leaf value;
# opts[:flatten_nil_value] also expands attributes whose value is nil.
def self.flatten_attribute_list(attr_list,opts={})
  flattened = Array.new
  attr_list.each do |attr|
    value = attr[:attribute_value]
    skip_flatten = (value.nil? && !opts[:flatten_nil_value]) || attr[:data_type] != "json"
    if skip_flatten
      flattened << attr
    else
      pattern = SemanticTypeSchema.create_from_semantic_type(attr[:semantic_type])
      flatten_attribute!(flattened,value,attr,pattern,opts.merge(:top_level => true))
    end
  end
  flattened
end

# Converts a raw post hash of attribute assignments into attribute rows.
def self.ravel_raw_post_hash(raw_post_hash,type,parent_id=nil)
  raise Error.new("Unexpected type #{type}") unless type == :attribute # TODO: may treat other types like component
  indexed = Hash.new
  ravel_raw_post_hash_attribute!(indexed,raw_post_hash,parent_id)
  indexed.values
end

# Joins path tokens with the common delimiter.
# NOTE(review): keeps the existing 'serialze' spelling; callers depend on it.
def self.serialze(token_array)
  token_array.join(Delim::Common)
end
-
-
1
private

Delim = Model::Delim
# Single-letter prefixes identifying container types in serialized ids
TypeMapping = {
  :attribute => :a,
  :component => :c
}

# Tokens for a complex value's item path: numeric indexes get the numeric
# prefix, hash keys are used verbatim; nil when there is no item path.
def self.item_path_token_array(attr)
  item_path = attr[:item_path]
  return nil unless item_path
  item_path.map do |indx|
    indx.kind_of?(Numeric) ? "#{Delim::NumericIndex}#{indx}" : indx.to_s
  end
end

# "<type-letter><delim><id>" token; nil when id is nil.
def self.container_id(type,id)
  return nil if id.nil?
  [TypeMapping[type.to_sym],id].join(Delim::Common)
end
-
-
1
# Parses posted attribute assignments (keys shaped "a<delim><id><path>") into
# ret, indexed by attribute id; keys that do not match are skipped. Top-level
# values are assigned directly to :value_asserted; paths into complex values
# are raveled via ravel_raw_post_hash_attribute_aux! and each traversal is
# recorded in :change_paths.
def self.ravel_raw_post_hash_attribute!(ret,attributes_hash,parent_id=nil)
  attributes_hash.each do |k,attr_hash|
    # $1 = numeric attribute id, $2 = remaining path into a complex value (may be "")
    id,path = (k =~ AttrIdRegexp) && [$1.to_i,$2]
    next unless id
    ret[id] ||= {:id => id}

    ##TODO: see if parent_id is needed
    ret[id].merge!(DB.parent_field(:component,:attribute) => parent_id) if parent_id

    if path.empty?
      ret[id][:value_asserted] = attr_hash
    else
      change_paths = ret[id][:change_paths] ||= Array.new
      change_paths << change_path = Array.new
      ravel_raw_post_hash_attribute_aux!(ret[id],:value_asserted,attr_hash,path,change_path)
    end
  end
end
# Matches "a<delim><numeric id><rest-of-path>"
AttrIdRegexp = Regexp.new("^#{TypeMapping[:attribute]}#{Delim::Common}([0-9]+)(.*$)")
-
-
1
# Recursively assigns hash (the posted value) into ret[index] following the
# serialized path, building intermediate arrays/hashes as needed and recording
# each traversed index/key in change_path.
# CLEANUP: removed a redundant leading match — the original pre-computed the
# NumericIndexRegexp match and then immediately recomputed it in the first
# branch; behavior (including the error path, where rest_path stays nil) is
# unchanged.
def self.ravel_raw_post_hash_attribute_aux!(ret,index,hash,path,change_path)
  if path =~ NumericIndexRegexp
    next_index, rest_path = [$1.to_i,$2]
    change_path << next_index
    ret[index] ||= ArrayObject.new
    # make sure that ret[index] has enough rows
    while ret[index].size <= next_index
      ret[index] << nil
    end
  elsif path =~ KeyWithRestRegexp
    next_index, rest_path = [$1,$2]
    change_path << next_index
    ret[index] ||= Hash.new
  elsif path =~ KeyWORestRegexp
    next_index, rest_path = [$1,String.new]
    change_path << next_index
    ret[index] ||= Hash.new
  else
    Log.error("parsing error on path #{path}")
  end

  if rest_path.empty?
    ret[index][next_index] = hash
  else
    ravel_raw_post_hash_attribute_aux!(ret[index],next_index,hash,rest_path,change_path)
  end
end
NumericIndexRegexp = Regexp.new("^#{Delim::Common}#{Delim::NumericIndex}([0-9]+)(.*$)")
# TODO make sure changing this is right KeyWithRestRegexp = Regexp.new("^#{Delim::Common}([^#{Delim::Char}]+)#{Delim::Common}(.+$)")
KeyWithRestRegexp = Regexp.new("^#{Delim::Common}([^#{Delim::Char}]+)(#{Delim::Common}.+$)")
KeyWORestRegexp = Regexp.new("^#{Delim::Common}(.*$)")
-
-
1
# Three-valued check that value_obj supplies every field its pattern marks
# required: true/false when determinable, nil on a shape mismatch.
def self.has_required_fields?(value_obj,pattern)
  # care must be taken to make this three-valued
  if pattern.is_atomic?()
    has_required_fields_when_atomic?(value_obj,pattern)
  elsif pattern.is_array?()
    has_required_fields_when_array?(value_obj,pattern)
  else
    has_required_fields_when_hash?(value_obj,pattern)
  end
end

# An atomic required field just needs a non-nil value.
def self.has_required_fields_when_atomic?(value_obj,pattern)
  (not pattern[:required]) or not value_obj.nil?
end

# Array check: non-array values yield nil; empty arrays fail unless the
# pattern allows them; otherwise every element must satisfy the body pattern.
def self.has_required_fields_when_array?(value_obj,pattern)
  unless value_obj.kind_of?(Array)
    Log.error("mismatch between object #{value_obj.inspect} and pattern #{pattern.inspect}")
    return nil
  end
  array_body_pat, can_be_empty = pattern.parse_array()
  return false if ((not can_be_empty) and value_obj.empty?)
  value_obj.each do |el|
    ret = has_required_fields?(el,array_body_pat)
    # propagate the first false/nil result
    return ret unless ret.kind_of?(TrueClass)
  end
  true
end

# Hash check: non-hash values yield nil; otherwise each child pattern must be
# satisfied by the correspondingly (symbol-)keyed value.
def self.has_required_fields_when_hash?(value_obj,pattern)
  unless value_obj.kind_of?(Hash)
    Log.error("mismatch between object #{value_obj.inspect} and pattern #{pattern.inspect}")
    return nil
  end

  pattern.each do |k,child_pat|
    el = value_obj[k.to_sym]
    ret = has_required_fields?(el,child_pat)
    # propagate the first false/nil result
    return ret unless ret.kind_of?(TrueClass)
  end
  true
end

# TODO: add "index that will be used to tie unravelled attribute back to the base object and make sure
# base object in the attribute
# Dispatches flattening of value_obj (guided by pattern, which may be nil) into
# leaf attributes appended to ret. opts[:top_level] marks the original unsplit
# attribute; opts[:flatten_nil_value] expands nil complex values too.
def self.flatten_attribute!(ret,value_obj,attr,pattern,opts={})
  if pattern.nil?
    flatten_attribute_when_nil_pattern!(ret,value_obj,attr,opts)
  elsif pattern.is_atomic?() and not (value_obj.kind_of?(Array) or value_obj.kind_of?(Hash))
    flatten_attribute_when_atomic_pattern!(ret,value_obj,attr,pattern,opts)
  elsif value_obj.kind_of?(Array) or (pattern.is_array?() and value_obj.nil? and opts[:flatten_nil_value])
    flatten_attribute_when_array!(ret,value_obj,attr,pattern,opts.merge(:top_level=>false))
  elsif value_obj.kind_of?(Hash) or (pattern.is_hash?() and value_obj.nil? and opts[:flatten_nil_value])
    flatten_attribute_when_hash!(ret,value_obj,attr,pattern,opts.merge(:top_level=>false))
  else
    flatten_attribute_when_mismatch!(ret,value_obj,attr,pattern,opts.merge(:top_level=>false))
  end
  nil
end

# No schema: recurse into hashes/arrays by value shape; scalars become a
# json-typed leaf attribute (the top-level attribute is kept unchanged).
def self.flatten_attribute_when_nil_pattern!(ret,value_obj,attr,opts={})
  if value_obj && value_obj.kind_of?(Hash)
    flatten_attribute_when_hash!(ret,value_obj,attr,nil,opts.merge(:top_level=>false))
  elsif value_obj && value_obj.kind_of?(Array)
    flatten_attribute_when_array!(ret,value_obj,attr,nil,opts.merge(:top_level=>false))
  elsif attr[:data_type] == "json" and opts[:top_level]
    ret << attr
  else
    ret << attr.merge(:attribute_value => value_obj,:data_type => "json")
  end
  nil
end

# Atomic leaf: emit the attribute with the pattern's type, copying any
# :required/:dynamic/:hidden flags declared on the pattern.
def self.flatten_attribute_when_atomic_pattern!(ret,value_obj,attr,pattern,opts={})
  if attr[:data_type] == pattern[:type].to_s and opts[:top_level]
    ret << attr
  else
    flatten_attr = attr.merge(:attribute_value => value_obj,:data_type => pattern[:type].to_s)
    [:required,:dynamic,:hidden].each{|k|flatten_attr.merge!(k => pattern[k]) unless pattern[k].nil?}
    ret << flatten_attr
  end
  nil
end

# Array value: emit one indexed child attribute per element (display_name gets
# a numeric suffix; :item_path accumulates the index).
def self.flatten_attribute_when_array!(ret,value_obj,attr,pattern,opts={})
  # compute child_list and array_pat if no mismatch
  child_list = nil
  if pattern.nil?
    # TODO: this really not a mismatch, but code still handles correctly
    return flatten_attribute_when_mismatch!(ret,value_obj,attr,pattern,opts) if (value_obj||[]).empty?
    # NOTE: array_pat stays nil here; flatten_attribute! handles a nil pattern
    child_list = value_obj
  elsif not pattern[:array]
    return flatten_attribute_when_mismatch!(ret,value_obj,attr,pattern,opts)
  elsif (value_obj||[]).empty? and not opts[:flatten_nil_value]
    ret << attr.merge(:attribute_value => value_obj)
    return nil
  else
    array_pat = pattern[:array]
    # if nil value_obj then just assume one row
    child_list = (value_obj||[]).empty? ? [nil] : value_obj
  end

  child_list.each_with_index do |child_val_obj,i|
    child_attr =
      if attr[:item_path]
        attr.merge(:display_name => "#{attr[:display_name]}#{display_name_num_delim(i)}", :item_path => attr[:item_path] + [i])
      else
        attr.merge(:root_display_name => attr[:display_name], :display_name => "#{attr[:display_name]}#{display_name_num_delim(i)}", :item_path => [i])
      end
    flatten_attribute!(ret,child_val_obj,child_attr,array_pat,opts)
  end
  nil
end

# Hash value: emit one child attribute per key (display_name gets a key
# suffix; :item_path accumulates the symbolized key). With a pattern and an
# empty/nil value plus :flatten_nil_value, synthesizes a nil child per
# pattern key.
def self.flatten_attribute_when_hash!(ret,value_obj,attr,pattern,opts={})
  # compute child_list if no mismatch
  child_list = nil
  if pattern.nil?
    # TODO: this really not a mismatch, but code still handles correctly
    return flatten_attribute_when_mismatch!(ret,value_obj,attr,pattern,opts) if (value_obj||{}).empty?
    child_list = value_obj
  elsif pattern[:array] or ((value_obj||{}).empty? and not opts[:flatten_nil_value])
    return flatten_attribute_when_mismatch!(ret,value_obj,attr,pattern,opts)
  else
    child_list = (value_obj||{}).empty? ? pattern.inject({}){|h,kv|h.merge(kv[0].to_sym => nil)} : value_obj
  end

  child_list.each do |k,child_val_obj|
    child_attr =
      if attr[:item_path]
        attr.merge(:display_name => "#{attr[:display_name]}#{display_name_delim(k)}", :item_path => attr[:item_path] + [k.to_sym])
      else
        attr.merge(:root_display_name => attr[:display_name], :display_name => "#{attr[:display_name]}#{display_name_delim(k)}", :item_path => [k.to_sym])
      end
    child_pattern = pattern && pattern[k.to_s]
    flatten_attribute!(ret,child_val_obj,child_attr,child_pattern,opts)
  end
  nil
end

# Fallback for value/pattern shape mismatches: log and emit the attribute as-is
# (top level) or with the value attached.
def self.flatten_attribute_when_mismatch!(ret,value_obj,attr,pattern,opts={})
  Log.error("mismatch between object #{value_obj.inspect} and pattern #{pattern.inspect}")
  ret << (opts[:top_level] ? attr : attr.merge(:attribute_value => value_obj))
  nil
end

# Display-name suffix for a hash key
def self.display_name_delim(x)
  Delim::DisplayName+(x.to_s)
end
# Display-name suffix for a numeric array index
def self.display_name_num_delim(x)
  Delim::DisplayName+Delim::NumericIndex+(x.to_s)
end
-
end
-
end
-
1
module DTK
  class Attribute
    module ConstantMixin
      # Whether this attribute's external_ref marks it as a constant.
      def is_constant?()
        Constant.is_constant?(get_field?(:external_ref))
      end
    end

    # A constant value tied to a dependent component/attribute pair.
    class Constant
      attr_reader :datatype,:dependent_attribute,:dependent_component

      def initialize(constant,dep_attr_ref,dep_cmp,datatype)
        @dependent_attribute = dep_attr_ref
        @dependent_component = dep_cmp
        @constant = constant
        @datatype = datatype.to_s
      end

      # Builds a Constant only when the constant text is usable in an attribute name.
      def self.create?(constant,dep_attr_ref,dep_cmp,datatype)
        new(constant,dep_attr_ref,dep_cmp,datatype) if is_valid_const?(constant)
      end

      # Constants with the same dependent attribute/component are "the same".
      def same_constant?(c2)
        dependent_attribute == c2.dependent_attribute and dependent_component == c2.dependent_component
      end

      def is_in?(constant_array)
        !!constant_array.find { |c2| same_constant?(c2) }
      end

      # Settings applied to attributes generated for constants.
      def self.side_effect_settings()
        {'hidden' => true}
      end

      ExternalRefType = "constant"
      def self.ret_external_ref()
        {"type" => ExternalRefType}
      end

      def self.is_constant?(external_ref)
        # this is specifically a symbol because external_ref's keys are symbols
        type = (external_ref || {})[:type]
        type == ExternalRefType if type
      end

      ConstantDelim = "___"
      # Synthetic attribute name encoding the constant and its dependents:
      # "___constant___<component>___<attribute>___<sanitized value>"
      def attribute_name()
        safe_val = self.class.constant_val_for_attr_name(@constant)
        ['', 'constant', @dependent_component, @dependent_attribute, safe_val].join(ConstantDelim)
      end

      def attribute_value()
        @constant
      end

      private

      SimpleTokenPat = 'a-zA-Z0-9_-' # TODO: should encapsulate with def in model/link_def/parse_serialized_form.rb of SimpleTokenPat
      AttributeTermRE = Regexp.new("^[#{SimpleTokenPat}]+$")
      OtherChars = Regexp.new("[^#{SimpleTokenPat}]")
      OtherCharsReplacement = 'X' # TODO: this is just a heuristic; possible name clash but very unlikely

      # Replaces characters illegal in attribute names with a placeholder.
      def self.constant_val_for_attr_name(constant)
        constant.gsub(OtherChars,OtherCharsReplacement)
      end

      def self.is_valid_const?(constant)
        !!(constant_val_for_attr_name(constant) =~ AttributeTermRE)
      end
    end
  end
end
-
1
module DTK
  class Attribute
    module DanglingLinksClassMixin
      # aug_attr_links is an array of attribute links (a specific link can appear
      # multiple times); it carries the dangling-link info and each element is
      # augmented with:
      #   :input_attribute  - attribute on the input side of the attribute link
      #   :other_input_link - an attribute link that connects to :input_attribute;
      #                       can refer to the same link as the element itself
      #
      # Recomputes derived values for attributes affected by deleted links and
      # propagates the changes (optionally producing state changes).
      def update_and_propagate_attributes_for_delete_links(attr_mh,aug_attr_links,propagate_opts={})
        delete_info = links_delete_info(aug_attr_links)
        return Array.new if delete_info.empty?
        # find updated attributes
        updated_attrs = UpdateDerivedValues.update_for_delete_links(attr_mh,delete_info)
        # propagate these changes; if propagate_opts[:add_state_changes] then produce state changes
        propagate_and_optionally_add_state_changes(attr_mh,updated_attrs,propagate_opts)
      end

      private

      # Groups external links by their input attribute, separating the link
      # being deleted from other links feeding the same attribute.
      def links_delete_info(aug_attr_links)
        indexed = Hash.new
        aug_attr_links.each do |link|
          a_link = link[:other_input_link]
          next unless a_link[:type] == "external"
          input_attribute = link[:input_attribute]
          info = indexed[input_attribute[:id]] ||= UpdateDerivedValues::Delete::LinkInfo.new(input_attribute)
          el = {
            :attribute_link_id => a_link[:id],
            :index_map => a_link[:index_map],
          }
          a_link[:id] == link[:id] ? info.add_deleted_link!(el) : info.add_other_link!(el)
        end
        indexed.values
      end

    end
  end
end
-
2
module DTK; class Attribute
-
1
module DatatypeMixin
  # Human-readable datatype: the semantic type summary when present (wrapped
  # as "array(...)" for arrays), otherwise the raw :data_type column.
  def ret_datatype()
    unless st_summary = self[:semantic_type_summary]
      self[:data_type]
    else
      is_array?() ? "array(#{st_summary})" : st_summary
    end
  end

  # Default-value info derived from :value_asserted, or nil when no default is
  # asserted. For array semantic types each element's info is flattened into
  # keys of the form "name[i]".
  def ret_default_info()
    default = self[:value_asserted]
    return nil unless default
    if is_array?()
      ret = Hash.new
      hash_semantic_type = semantic_type[:array]
      default.each_with_index do |d,i|
        # BUGFIX: ret_default_info__hash is defined on the Datatype module,
        # not on this mixin; the unqualified call raised NoMethodError.
        el = Datatype.ret_default_info__hash(hash_semantic_type,d)
        el.each{|k,v|ret.merge!("#{k}[#{i.to_s}]" => v)}
      end
      ret
    else
      Datatype.ret_default_info__hash(semantic_type,default)
    end
  end

  # Coerces this attribute's stored value into the corresponding Ruby object.
  def convert_value_to_ruby_object()
    update_object!(:data_type,:value_asserted,:value_derived)
    Datatype.convert_value_to_ruby_object(self)
  end

  private
  # Memoized semantic type schema object built from this attribute.
  def semantic_type()
    @semantic_type ||= SemanticTypeSchema.create_from_attribute(self)
  end
  # Whether this attribute's semantic type is an array type.
  def is_array?()
    semantic_type().is_array?()
  end
end
-
-
1
module Datatype
  # All legal datatype names: every scalar type plus its "array(...)" variant.
  def self.ret_datatypes()
    scalar_types = SemanticTypeSchema.ret_scalar_defined_datatypes()
    scalar_types += ret_builtin_scalar_types()
    ret = Array.new
    scalar_types.each do |t|
      ret << t
      ret << "array(#{t})"
    end
    ret
  end

  # Maps a Ruby object to its attribute datatype name.
  def self.datatype_from_ruby_object(obj)
    if obj.kind_of?(TrueClass) or obj.kind_of?(FalseClass)
      "boolean"
    # BUGFIX: was Fixnum, which missed Bignum values (classified as "string")
    # and no longer exists in Ruby >= 3.2; Integer covers both
    elsif obj.kind_of?(Integer)
      "integer"
    elsif obj.kind_of?(Hash) or obj.kind_of?(Array)
      "json"
    else
      "string"
    end
  end

  # Converts the attribute's raw stored value (column opts[:value_field],
  # default :attribute_value) into a Ruby object per :data_type; nil stays nil.
  # Raises a usage error (via raise_error_msg) when the value does not fit.
  def self.convert_value_to_ruby_object(attr,opts={})
    attr_val_field = opts[:value_field]||:attribute_value
    raw_val = attr[attr_val_field]
    return nil if raw_val.nil?
    case (attr[:data_type]||"string")
    when "string"
      raw_val
    when "boolean"
      case raw_val.to_s
      when "true" then true
      when "false" then false
      else raise_error_msg("boolean",raw_val,attr)
      end
    when "integer"
      # BUGFIX: was /^[0-9]+$/ which (a) rejected negative integers,
      # (b) matched only one line of a multi-line string (\A..\z anchors the
      # whole string), and (c) errored when the value was already an Integer
      if raw_val.to_s =~ /\A-?[0-9]+\z/
        raw_val.to_i
      else
        raise_error_msg("integer",raw_val,attr)
      end
    when "json"
      # will be converted already
      raw_val
    else
      raise Error.new("Unexpected Datatype (#{attr[:data_type]}) for attribute (#{attr.print_form()})")
    end
  end

  # Converts an external attribute-def hash with :datatype (e.g. "integer" or
  # "array(foo)") into internal column form: builtin scalars map directly to
  # :data_type; anything else is stored as json with semantic type info.
  def self.attr_def_to_internal_form(hash)
    ret = Hash.new
    # check if it is an array
    # TODO: stub fn to check if array
    datatype = hash[:datatype]
    return ret unless datatype
    is_array = nil
    if datatype =~ /^array\((.+)\)$/
      datatype = $1
      is_array = true
    end
    if ret_builtin_scalar_types().include?(datatype)
      ret[:data_type] = datatype
    else
      ret[:data_type] = "json"
      ret[:semantic_type_summary] = datatype
      # BUGFIX: was {":array".to_sym => datatype}, which produced the literal
      # key :":array" rather than :array -- the key DatatypeMixin reads via
      # semantic_type[:array]
      ret[:semantic_type] = is_array ? {:array => datatype} : datatype
    end
    ret
  end

  # Builds per-key default info from a hash semantic type: for each
  # non-dynamic key, collects :required (when declared), :type, and the
  # default value (when present in +default+).
  def self.ret_default_info__hash(hash_semantic_type,default)
    hash_semantic_type.inject({}) do |h,(k,v)|
      if v[:dynamic]
        h
      else
        info = Hash.new
        info.merge!(:required=> v[:required]) if v.has_key?(:required)
        info.merge!(:type => v[:type])
        info.merge!(:default_value => default[k]) if default.has_key?(k)
        h.merge(k => info)
      end
    end
  end

  # The fallback datatype.
  def self.default()
    "string"
  end

  private
  # Raises a user-facing error for a value that does not match +type+.
  def self.raise_error_msg(type,val,attr)
    val_print_form = (val.respond_to?(:to_s) ? val.to_s : val.inspect)
    raise ErrorUsage.new("Unexpected #{type.to_s.capitalize} Value (#{val_print_form}) for attribute (#{attr.print_form}); use set-attribute to change its value")
  end

  # Scalar datatypes built into the model (as opposed to schema-defined ones).
  def self.ret_builtin_scalar_types()
    [
      "string",
      "integer",
      "boolean"
    ]
  end
end
-
end; end
-
1
module XYZ
-
1
module AttrDepAnalaysisClassMixin
-
# block params are attr_in,link,attr_out
-
1
# Yields (attr_in, link, attr_out) for every attribute link whose input side
# is a component-level attribute in aug_attr_list and whose output attribute
# can also be located in the list. No-op on an empty list.
def dependency_analysis(aug_attr_list,&block)
  return if aug_attr_list.empty?
  attr_ids = aug_attr_list.map { |a| a[:id] }.uniq
  attr_link_mh = aug_attr_list.first.model_handle(:attribute_link)
  links_to_trace = get_objects_from_sp_hash(attr_link_mh, {
    :cols => [:function,:index_map,:input_id,:output_id],
    :filter => [:oneof ,:input_id, attr_ids]
  })

  # pair each component attribute with the links it feeds (node attributes
  # are skipped)
  input_matches = []
  aug_attr_list.each do |attr|
    next unless attr[:component]
    find_matching_links(attr,links_to_trace).each do |link|
      input_matches << [attr, link]
    end
  end

  input_matches.each do |attr_in, link|
    attr_out = find_matching_output_attr(aug_attr_list,attr_in,link)
    block.call(attr_in,link,attr_out) if attr_out
  end
end
-
-
# block params is guard_rel which is hash with keys guard_attr,link,guarded_attr
-
1
# Yields a guard relation hash (keys :guard_attr, :guarded_attr, :link) for
# each attribute dependency that GuardRel deems in need of a guard.
def guarded_attribute_rels(aug_attr_list,&block)
  Attribute.dependency_analysis(aug_attr_list) do |in_attr,link,out_attr|
    rel = {
      :guarded_attr => in_attr,
      :guard_attr => out_attr,
      :link => link
    }
    block.call(rel) if GuardRel.needs_guard?(rel)
  end
end
-
-
1
private
-
1
# Locates the output-side attribute of +link+ in aug_attr_list, applying the
# link function's matching semantics; returns nil when none matches or the
# function is not handled.
# TODO: to make more efficient have other find_matching_output_attr__[link_fn]
def find_matching_output_attr(aug_attr_list,attr_in,link)
  # eq_indexed links use their own index-map-based matcher
  return find_matching_output_attr__eq_indexed(aug_attr_list,attr_in,link) if link[:function] == "eq_indexed"
  wanted_id = link[:output_id]
  aug_attr_list.find do |attr|
    next false unless attr[:id] == wanted_id
    case link[:function]
    when "eq", "array_append"
      true
    when "select_one"
      # matches when the input item path equals the output path minus its head
      out_path = attr[:item_path]
      out_path and (attr_in[:item_path] == out_path[1, out_path.size - 1])
    else
      Log.error("not treated when link function is #{link[:function]}")
      nil
    end
  end
end
-
-
1
# eq_indexed variant: finds the output attribute whose item_path agrees with
# the link's single output index map. Only single-element index maps are
# handled; anything else is logged and yields nil.
def find_matching_output_attr__eq_indexed(aug_attr_list,attr_in,link)
  unless (link[:index_map]||[]).size == 1
    Log.error("not treating index maps with multiple elements")
    return nil
  end
  out_index_map = link[:index_map].first[:output]||[]
  wanted_id = link[:output_id]
  aug_attr_list.find do |attr|
    attr[:id] == wanted_id and matching_index_maps?(out_index_map,attr[:item_path])
  end
end
-
-
1
# True when item_path equals index_map element-wise (String elements of the
# map are compared as symbols); an empty map matches anything. Returns nil
# (not false) on any mismatch.
def matching_index_maps?(index_map,item_path)
  return true if index_map.empty?
  return nil unless index_map.size == item_path.size
  all_equal = index_map.each_with_index.all? do |el,i|
    item_path[i] == (el.kind_of?(String) ? el.to_sym : el)
  end
  all_equal ? true : nil
end
-
-
1
# Links whose input side is +attr+ and whose index constraints accept the
# attribute's item_path.
def find_matching_links(attr,links)
  links.select do |link|
    link[:input_id] == attr[:id] and index_match(link,attr[:item_path])
  end
end
-
-
1
# Whether +link+ applies to an input attribute with the given item_path:
# "eq"/"array_append"/"select_one" always match; "eq_indexed" matches when the
# link's (single) input index map equals item_path element-wise (compared via
# to_s). Unknown functions and mismatches yield nil.
def index_match(link,item_path)
  fn = link[:function]
  return true if ["eq","array_append","select_one"].include?(fn)
  return nil unless fn == "eq_indexed"
  index_maps = link[:index_map]||[]
  Log.error("not treating index maps with multiple elements") if index_maps.size > 1
  input_map = (index_maps.first||{})[:input]
  return nil unless input_map
  return nil unless item_path.kind_of?(Array) and input_map.size == item_path.size
  item_path.each_with_index do |el,i|
    return nil unless el.to_s == input_map[i].to_s
  end
  true
end
-
-
1
module GuardRel
  # Decides whether a dependency (guard_attr feeds guarded_attr via link)
  # needs a guard: only when the guard attribute is dynamic, the guarded
  # attribute is still unset, and the guard is a config-node (not create-node)
  # action. Returns true or nil.
  def self.needs_guard?(guard_rel)
    guard_attr,guarded_attr,link = guard_rel[:guard_attr],guard_rel[:guarded_attr],guard_rel[:link]
    # guard_attr can be null if guard refers to node level attr
    # TODO: are there any other cases where it can be null; previous text said 'this can happen if guard attribute is in component that ran already'
    # TODO: below works if guard is node level attr
    return nil unless guard_attr

    # guarding attributes that are unset and are fed by dynamic attribute
    # TODO: should we assume that what gets here are only required attributes
    # TODO: removed clause (not guard_attr[:attribute_value]) in case has value that needs to be recomputed
    return nil unless guard_attr[:dynamic] and unset_guarded_attr?(guarded_attr,link)

    # TODO: clean up; not sure if still needed
    guard_task_type = (guard_attr[:semantic_type_summary] == "sap__l4" and (guard_attr[:item_path]||[]).include?(:host_address)) ? Task::Action::CreateNode : Task::Action::ConfigNode
    # right now only using config node to config node guards
    return nil if guard_task_type == Task::Action::CreateNode
    true
  end
  private
  # if dont know for certain better to err as being a guard
  def self.unset_guarded_attr?(guarded_attr,link)
    val = guarded_attr[:attribute_value]
    if val.nil?
      true
    elsif link[:function] == "array_append"
      unset_guarded_attr__array_append?(val,link)
    end
  end

  # For array_append links: the guarded slot is unset when the indexed element
  # of the (array) value is nil; a link without an index map is treated as
  # unset. Only single-member, numeric-index maps are supported.
  def self.unset_guarded_attr__array_append?(guarded_attr_val,link)
    if input_map = link[:index_map]
      unless input_map.size == 1
        raise Error.new("Not treating index map with more than one member")
      end
      input_index = input_map.first[:input]
      unless input_index.size == 1
        raise Error.new("Not treating input index with more than one member")
      end
      input_num = input_index.first
      # BUGFIX: was Fixnum, which misclassified Bignum indices as non-numeric
      # and no longer exists in Ruby >= 3.2; Integer covers both
      unless input_num.kind_of?(Integer)
        raise Error.new("Not treating input index that is non-numeric")
      end
      guarded_attr_val.kind_of?(Array) and guarded_attr_val[input_num].nil?
    else
      true
    end
  end
end
-
end
-
end
-
# TODO: will move get methods that will not be deprecating to here or some file underneath a file directory
-
2
module DTK; class Attribute
-
1
module GetMethod
-
1
module Mixin
  # Serialized definition of this attribute: id, field name, datatype,
  # required/dynamic flags, implementation (puppet/chef) attribute name when
  # it differs from the r8 name, and default-value info.
  def get_attribute_def()
    # BUGFIX: was :dyanmic (typo) here and below; the column is declared as
    # :dynamic in the meta definition, so the flag could never be loaded or
    # surfaced in the returned hash
    update_object!(:id,:display_name,:value_asserted,:required,:external_ref,:dynamic,:data_type,:semantic_type,:semantic_type_summary,:config_agent_type)
    ret = Hash.new
    [:id,:required,:dynamic].each{|k|ret[k] = self[k] if self[k]}
    ret[:field_name] = self[:display_name]

    # put in optional key that indicates implementation attribute
    # NOTE(review): assumes a config agent resolves (impl_attr non-nil) --
    # confirm ConfigAgent.load cannot return nil here
    impl_attr = ret_implementation_attribute_name_and_type()
    # default is that implementation attribute name same as r8 attribute name; so omit if default
    unless self[:display_name] == impl_attr[:name]
      case impl_attr[:type].to_sym
      when :puppet then ret.merge!(:puppet_attribute_name => impl_attr[:name])
      when :chef then ret.merge!(:chef_attribute_name => impl_attr[:name])
      end
    end
    ret[:datatype] = ret_datatype()

    if default_info = ret_default_info()
      ret[:default_info] = default_info
    end
    ret
  end

  # OR-constraint set assembled from this attribute's :dependencies rows.
  def get_constraints!(opts={})
    Log.error("opts not implemented yet") unless opts.empty?
    dependency_list = get_objects_col_from_sp_hash({:columns => [:dependencies]},:dependencies)
    Constraints.new(:or,dependency_list.map{|dep|Constraint.create(dep)})
  end

  # Node this attribute hangs off of; raises if the attribute is not on a
  # node. opts[:cols] selects columns; opts[:subclass_model_name] re-types
  # the returned object.
  def get_node(opts={})
    unless node_node_id = get_field?(:node_node_id)
      raise Error.new("get_node should not be called if attribute not on a node")
    end
    sp_hash = {
      :cols => opts[:cols]||[:id,:group_id,:display_name],
      :filter => [:eq,:id,node_node_id]
    }
    ret = Node.get_obj(model_handle(:node),sp_hash)
    if subclass_model_name = opts[:subclass_model_name]
      ret = ret.create_subclass_obj(subclass_model_name)
    end
    ret
  end

  # Port info rows for the given attribute id handles.
  def self.get_port_info(id_handles)
    get_objects_in_set_from_sp_hash(id_handles,{:cols => [:port_info]},{:keep_ref_cols => true})
  end

  # Same as get_node, but the returned object is typed as a service node group.
  def get_service_node_group(opts={})
    get_node(opts.merge(:subclass_model_name => :service_node_group))
  end

  private
  # Delegates to the config agent to map this attribute to the
  # implementation-level attribute name and type; nil when no agent loads.
  def ret_implementation_attribute_name_and_type()
    config_agent = ConfigAgent.load(self[:config_agent_type])
    config_agent && config_agent.ret_attribute_name_and_type(self)
  end
end
-
-
1
module ClassMixin
  # Resolves an attribute either by numeric id or by an identifier of the
  # form "cmp[<component_name>]/<attribute_name>", scoped to component cmp_id.
  # Raises ErrorUsage when nothing matches; returns the attribute object.
  def get_attribute_from_identifier(identifier, mh, cmp_id)
    valid_attribute = nil
    if identifier.to_s =~ /^[0-9]+$/
      # numeric identifier: direct id lookup
      sp_hash = {
        :cols => Attribute.common_columns(),
        :filter => [:eq,:id,identifier]
      }

      valid_attribute = Model.get_obj(mh,sp_hash)
      raise ErrorUsage.new("Illegal identifier '#{identifier}' for component-module attribute") unless valid_attribute
    else
      # extracting component and attribute name from identifier
      # e.g. cmp[dtk_addons::rspec2db]/user => component_name = dtk_addons::rspec2db, attribute_name = user
      match_from_identifier = identifier.match(/.+\[(.*)\]\/(.*)/)

      if match_from_identifier
        param_cmp_name = match_from_identifier[1].gsub(/::/,'__')
        param_attr_name = match_from_identifier[2].gsub(/::/,'__')
      end

      raise ErrorUsage.new("Illegal identifier '#{identifier}' for component-module attribute") unless param_attr_name && param_cmp_name

      sp_hash = {
        # component_module_parent will return more info about attribute (component it belongs to and module branch which we can get component_module_id from)
        :cols => common_columns + [:component_module_parent],
        :filter => [:eq, :display_name, param_attr_name]
      }
      matching_attributes = Model.get_objs(mh,sp_hash)

      # every component attribute has external_ref field with info ({"type":"puppet_attribute","path":"node[logrotate__rule][copytruncate]"})
      # using external_ref[:path] to extract component_name (logrotate__rule) and attribute_name (copytruncate)
      # and compare to data that user have sent as params
      matching_attributes.each do |m_attr|
        if (external_ref = m_attr[:external_ref]) && (path = m_attr[:external_ref][:path])
          match = path.match(/.+\[(.*)\]\[(.*)\]/)
          cmp_name, attr_name = match[1], match[2] if match

          if module_branch = m_attr[:module_branch]
            # all three must agree: component name, attribute name, owning component id
            valid_attribute = m_attr if param_cmp_name.eql?(cmp_name) && param_attr_name.eql?(attr_name) && module_branch[:component_id].to_s.eql?(cmp_id)
          end
          break if valid_attribute
        end
      end

      raise ErrorUsage.new("Illegal identifier '#{identifier}' for component-module attribute") unless valid_attribute
    end

    valid_attribute
  end

  # Fetches attributes matching +filter+ augmented with node/component info;
  # the owning node (direct, or the component's node) is surfaced under :node
  # and nil :component columns are stripped.
  def get_augmented(model_handle,filter)
    ret = Array.new
    sp_hash = {
      :cols => common_columns + [:node_component_info],
      :filter => filter
    }
    attrs = get_objs(model_handle,sp_hash)
    return ret if attrs.empty?
    attrs.each do |r|
      r.delete(:component) if r[:component].nil? #get rid of nil :component cols

      if node = r.delete(:direct_node)||r.delete(:component_node)
        r.merge!(:node => node)
      end
    end
    attrs
  end
end
-
end
-
end; end
-
-
-
1
module XYZ
-
1
module AttributeGroupClassMixin
  # marked with "!" because augments info
  # Classifies each attribute in augmented_attr_list by value type and returns
  # those whose type is among opts[:types_to_keep] (all when the option is
  # absent), each merged with :attr_val_type.
  def ret_grouped_attributes!(augmented_attr_list,opts={})
    keep = opts[:types_to_keep]
    add_missing_info_for_group_attrs!(augmented_attr_list)

    augmented_attr_list.each_with_object(Array.new) do |attr,acc|
      val_type = attr.attribute_value_type()
      if keep.nil? or val_type.type_of?(*keep)
        acc << attr.merge(:attr_val_type => val_type)
      end
    end
  end
  private
  # adds port type info and required: input-side attributes are tagged
  # :port_type => "input"; a non-dynamic output feeding a required input is
  # marked required as well
  def add_missing_info_for_group_attrs!(augmented_attr_list)
    dependency_analysis(augmented_attr_list) do |attr_in,link,attr_out|
      attr_in.merge!(:port_type => "input")
      if attr_in[:required] and attr_out and not attr_out[:dynamic]
        attr_out.merge!(:required => true)
      end
    end
  end
end
-
-
1
# Classification object for how an attribute gets its value; concrete
# subclasses are registered in AttrValTypeMap.
class AttrValType
  # Instantiates the subclass registered for +type+; raises on unknown types.
  def self.create(type,attr)
    klass = AttrValTypeMap[type.to_sym]
    raise Error.new("attribute value type (#{type}) not treated") unless klass
    klass.new(type,attr)
  end

  # disjunction of types: true when self is an instance of any given type
  # key's class, nil otherwise; unknown keys are logged and skipped
  def type_of?(*types)
    matched = types.find do |type|
      klass = AttrValTypeMap[type]
      unless klass
        Log.error("illegal type given #{type}")
        next
      end
      kind_of?(klass)
    end
    matched ? true : nil
  end

  private
  def initialize(type,attr)
    @type = type.to_sym
    @is_set = attr[:attribute_value] ? true : false #TODO: handling legitimate nil values
  end
end
-
-
# TODO: if dont have type hierarchy then can simplify
# Value must be supplied for actions to run (see :required column).
class AttrValTypeRequired < AttrValType
end
# Value is optional.
class AttrValTypeNotRequired < AttrValType
end
# Value is set dynamically by an executed action (see :dynamic column).
class AttrValTypeDynamic < AttrValType
end
# Value arrives through an attribute link / input port.
class AttrValTypeLinked < AttrValType
end

# Registry used by AttrValType.create and #type_of? to map type keys to
# their classes.
AttrValTypeMap = {
  :required => AttrValTypeRequired,
  :not_required => AttrValTypeNotRequired,
  :dynamic => AttrValTypeDynamic,
  :linked => AttrValTypeLinked,
}
-
-
1
module AttributeGroupInstanceMixin
  # Determines this attribute's value-type classification and wraps it in an
  # AttrValType instance. Precedence: sap__l4 semantic type and input ports
  # are :linked, dynamic wins over required, default is :not_required.
  def attribute_value_type()
    # TODO: need to clean up special processing of sap__l4 because marked as output port but also input port (from internal connections)
    type =
      case
      when self[:semantic_type_summary] == "sap__l4" then :linked
      when self[:dynamic] then :dynamic
      when self[:port_type] == "input" then :linked
      when self[:required] then :required
      else :not_required
      end
    raise Error.new("Cannot detect type of attribute") unless type
    AttrValType.create(type,self)
  end
end
-
end
-
2
module DTK; class Attribute
  class LegalValue
    # Validates the proposed new values (ndx_new_vals, indexed by attribute
    # id) against existing_attrs; usage errors are collected and raised as a
    # single batch. Only special-processing checks are implemented so far.
    def self.raise_usage_errors?(existing_attrs,ndx_new_vals)
      errors = ErrorsUsage.new
      existing_attrs.each do |a|
        new_val = ndx_new_vals[a[:id]]
        special_processing,error = SpecialProcessing::ValueCheck.error_special_processing?(a,new_val)
        if special_processing
          errors << error if error
        else
          # TODO: stub for normal error processing
        end
      end
      unless errors.empty?
        raise errors
      end
    end
    # Usage error for an illegal attribute value; the message optionally lists
    # info[:legal_values].
    class Error < ErrorUsage
      def initialize(attr,new_val,info={})
        super(error_msg(attr,new_val,info))
      end
      private
      def error_msg(attr,new_val,info)
        attr_name = attr[:display_name]
        # BUGFIX: message interpolated the whole attr object (#{attr}) rather
        # than its display name; attr_name was computed but unused
        ret = "Attribute (#{attr_name}) has illegal value (#{new_val})"
        if legal_vals = info[:legal_values]
          indent = " "*2
          sep = "--------------"
          ret << "; legal values are: \n#{sep}\n#{indent}#{legal_vals.join("\n#{indent}")}"
          ret << "\n#{sep}\n"
        end
        ret
      end
    end
  end
end; end
-
-
# TODO: temp until move into meta directory
-
2
module XYZ; class Attribute
-
# TOOD: hack taht can be removed when update_object allows virtual types
-
1
# NOTE: module name is misspelled ("Virtula") but is referenced elsewhere
# (MetaClassMixin), so it cannot be renamed here. Hack that can be removed
# when update_object allows virtual types.
module VirtulaDependency
  # Local column dependencies of the :port_type virtual column.
  def self.port_type()
    %i[dynamic is_port port_type_asserted semantic_type_summary]
  end
end
-
-
1
# Model/meta definition for the attribute relation: declares its physical
# columns, virtual (computed or join-backed) columns, and parent relations.
# Invoked by the model layer when (re)defining the schema.
module MetaClassMixin
  def up()
    external_ref_column_defs()
    virtual_column :config_agent_type, :type => :string, :local_dependencies => [:external_ref]
    virtual_column :title, :type => :string, :local_dependencies => [:value_asserted,:value_derived,:external_ref,:display_name]

    # columns related to the value
    column :value_asserted, :json, :ret_keys_as_symbols => false
    column :value_derived, :json, :ret_keys_as_symbols => false
    column :is_instance_value, :boolean, :default => false #to distinguish between when value_asserted is from default versus directly asserted
    # TODO: not used yet column :value_actual, :json, :ret_keys_as_symbols => false
    # TODO: may rename attribute_value to desired_value
    # attribute_value is value_asserted with fallback to value_derived (SQL coalesce)
    virtual_column :attribute_value, :type => :json, :local_dependencies => [:value_asserted,:value_derived],
    :sql_fn => SQL::ColRef.coalesce(:value_asserted,:value_derived)

    # TODO: should collapse the semantic types
    # columns related to the data/semantic type
    column :data_type, :varchar, :size => 25
    column :semantic_data_type, :varchar, :size => 25
    column :semantic_type, :json #points to structural info for a json var
    column :semantic_type_summary, :varchar, :size => 25 #for efficiency optional token that summarizes info from semantic_type
    virtual_column :semantic_type_object, :type => :object, :hidden => true, :local_dependencies => [:semantic_type]

    # TODO: may be able to remove some fields and use tags to store them
    column :tags, :json

    ### cols that relate to who or what can or does change the attribute
    # TODO: need to clearly relate these four; may get rid of read_only
    column :read_only, :boolean, :default => false
    column :dynamic, :boolean, :default => false #means dynamically set by an executable action
    column :cannot_change, :boolean, :default => false

    column :required, :boolean, :default => false #whether required for this attribute to have a value inorder to execute actions for parent component; TODO: may be indexed by action
    column :hidden, :boolean, :default => false

    # columns related to links
    # TODO: for succinctness may use less storage and collapse a number of port attributes
    column :port_location, :varchar, :size => 10 #if set is override for port direction: east | west | south | north
    column :is_port, :boolean, :default => false
    column :port_type_asserted, :varchar, :size => 10
    column :is_external, :boolean

    virtual_column :port_type, :type => :varchar, :hidden => true, :local_dependencies => VirtulaDependency.port_type()

    virtual_column :port_is_external, :type => :boolean, :hidden => true, :local_dependencies => [:is_port,:is_external,:semantic_type_summary]

    virtual_column :is_unset, :type => :boolean, :hidden => true, :local_dependencies => [:value_asserted,:value_derived,:data_type,:semantic_type]

    virtual_column :parent_name, :possible_parents => [:component,:node]
    many_to_one :component, :node
    one_to_many :dependency #for ports indicating what they can connect to

    # constraint (dependency) rows attached to this attribute
    virtual_column :dependencies, :type => :json, :hidden => true,
    :remote_dependencies =>
    [
      {
        :model_name => :dependency,
        :alias => :dependencies,
        :convert => true,
        :join_type => :inner,
        :join_cond=>{:attribute_attribute_id => q(:attribute,:id)},
        :cols => [:id,:search_pattern,:type,:description,:severity]
      }]

    # owning component, when the attribute is component-level
    virtual_column :component_parent, :type => :json, :hidden => true,
    :remote_dependencies =>
    [
      {
        :model_name => :component,
        :alias => :component_parent,
        :convert => true,
        :join_type => :left_outer,
        :join_cond=>{:id => p(:attribute,:component)},
        :cols => [:id,:display_name,:component_type,:most_specific_type,:connectivity_profile_external,:ancestor_id,:node_node_id,:extended_base_id]
      }]

    # owning component plus its module branch (used for component-module lookups)
    virtual_column :component_module_parent, :type => :json, :hidden => true,
    :remote_dependencies =>
    [
      {
        :model_name => :component,
        :join_type => :inner,
        :join_cond=>{:id=> :attribute__component_component_id},
        :cols=>[:id,:display_name,:module_branch_id]
      },
      {
        :model_name => :module_branch,
        :join_type => :inner,
        :join_cond=>{:id=> :component__module_branch_id},
        :cols=>[:id, :component_id]
      }
    ]

    # finds both component parents with node and direct node parent
    virtual_column :node_component_info, :type => :json, :hidden => true,
    :remote_dependencies =>
    [{
      :model_name => :node,
      :convert => true,
      :alias => :direct_node,
      :join_type => :left_outer,
      :join_cond=>{:id => p(:attribute,:node)},
      :cols => [:id,:display_name,:group_id]
    },
    {
      :model_name => :component,
      :convert => true,
      :join_type => :left_outer,
      :join_cond=>{:id => p(:attribute,:component)},
      :cols => [:id,:display_name,:group_id,:component_type,:node_node_id]
    },
    {
      :model_name => :node,
      :convert => true,
      :alias => :component_node,
      :join_type => :left_outer,
      :join_cond=>{:id => p(:component,:node)},
      :cols => [:id,:display_name,:group_id]
    }]

    # the external port backed by this attribute, plus its containing l4 port
    virtual_column :port_info, :type => :boolean, :hidden => true,
    :remote_dependencies =>
    [
      {
        :model_name => :port,
        :alias => :port_external,
        :join_type => :inner,
        :filter => [:eq,:type,"external"],
        :join_cond=>{:external_attribute_id => q(:attribute,:id)},
        :cols => [:id,:type,id(:node),:containing_port_id,:external_attribute_id,:ref]
      },
      {
        :model_name => :port,
        :alias => :port_l4,
        :join_type => :left_outer,
        :filter => [:eq,:type,"l4"],
        :join_cond=>{:id => q(:port_external,:containing_port_id)},
        :cols => [:id,:type,id(:node),:containing_port_id,:external_attribute_id,:ref]
      }]

    # uri column joined in from the id_info relation
    uri_remote_dependencies =
    {:uri =>
      [
        {
          :model_name => :id_info,
          :join_cond=>{:relation_id => :attribute__id},
          :cols=>[:relation_id,:uri]
        }
      ]
    }
    virtual_column :id_info_uri, :hidden => true, :remote_dependencies => uri_remote_dependencies

    virtual_column :unraveled_attribute_id, :type => :varchar, :hidden => true #TODO put in dependencies

    # TODO: may deprecate
    virtual_column :qualified_attribute_name_under_node, :type => :varchar, :hidden => true #TODO put in dependencies
    virtual_column :qualified_attribute_id_under_node, :type => :varchar, :hidden => true #TODO put in dependencies
    virtual_column :qualified_attribute_name, :type => :varchar, :hidden => true #not giving dependences because assuming right base_object included in col list

    # attribute links where this attribute is the output side, joined with the
    # input-side attribute's value columns
    virtual_column :linked_attributes, :type => :json, :hidden => true,
    :remote_dependencies =>
    [
      {
        :model_name => :attribute_link,
        :join_type => :inner,
        :join_cond=>{:output_id=> :attribute__id},
        :cols=>[:output_id,:input_id,:function,:index_map]
      },
      {
        :model_name => :attribute,
        :alias => :input_attribute,
        :join_type => :inner,
        :join_cond=>{:id=> :attribute_link__input_id},
        :cols=>[:id, :value_asserted,:value_derived,:semantic_type,:display_name]
      }
    ]
  end
end
-
end; end
-
2
module DTK; class Attribute
-
1
class Pattern
-
-
1
r8_nested_require('pattern','type')
-
1
r8_nested_require('pattern','assembly')
-
1
r8_nested_require('pattern','node')
-
1
r8_nested_require('pattern','term')
-
-
1
# Extracts the node-name fragment ($1 of NodeComponentRegexp) from a
# canonical attribute pattern.
# BUGFIX: the method referenced +pattern+ without declaring any parameter,
# guaranteeing a NameError on every call; it now takes the pattern argument
# exactly like its siblings component_fragment/attribute_fragment.
def self.node_name(pattern)
  (pattern =~ NodeComponentRegexp ? $1 : raise_unexpected_pattern(pattern))
end
-
1
# Component fragment ($2 of NodeComponentRegexp) of a canonical pattern;
# raises (internally) when the pattern does not match.
def self.component_fragment(pattern)
  matched = (pattern =~ NodeComponentRegexp)
  matched ? $2 : raise_unexpected_pattern(pattern)
end
-
1
# Attribute fragment ($1 of AttrRegexp) of a canonical component-level
# pattern; raises (internally) when the pattern does not match.
def self.attribute_fragment(pattern)
  matched = (pattern =~ AttrRegexp)
  matched ? $1 : raise_unexpected_pattern(pattern)
end
-
1
Delim = "#{Term::EscpLDelim}[^#{Term::EscpRDelim}]*#{Term::EscpRDelim}"
-
1
DelimWithSelect = "#{Term::EscpLDelim}([^#{Term::EscpRDelim}]*)#{Term::EscpRDelim}"
-
-
1
NodeComponentRegexp = Regexp.new("^node#{DelimWithSelect}\/(component.+$)")
-
1
AttrRegexp = Regexp.new("node[^\/]*\/component#{Delim}\/(attribute.+$)")
-
-
1
# Internal error raised when a pattern fails to match one of the canonical
# regexps above (Error, not a user-facing ErrorUsage).
def self.raise_unexpected_pattern(pattern)
  raise Error.new("Unexpected that pattern (#{pattern}) did not match")
end
private_class_method :raise_unexpected_pattern
-
-
1
# Builds an attribute pattern for base_object from attr_term, then binds its
# parent and matching attributes; returns the pattern.
def self.create_attr_pattern(base_object,attr_term,opts={})
  pattern = create(attr_term,base_object,opts)
  pattern.set_parent_and_attributes!(base_object.id_handle(),opts)
end
-
-
# set_attributes can create or set attributes depending on options in opts
# returns attribute patterns
def self.set_attributes(base_object,av_pairs,opts={})
  ret = Array.new
  attribute_rows = Array.new
  ambiguous = Array.new
  attr_properties = opts[:attribute_properties]||{}
  attributes = base_object.list_attributes(Opts.new(:with_assembly_wide_node => true))

  av_pairs.each do |av_pair|
    value = av_pair[:value]
    # optional type check against an explicitly supplied semantic datatype
    if semantic_data_type = attr_properties[:semantic_data_type]
      if value
        unless SemanticDatatype.is_valid?(semantic_data_type,value)
          raise ErrorUsage.new("The value (#{value.inspect}) is not of type (#{semantic_data_type})")
        end
      end
    end

    # if service instance has components check if there is a node with same name as component
    # if true then it is ambiguous whether using node or component attribute
    check_ambiguity(attributes, av_pair, ambiguous, opts) if base_object.has_assembly_wide_node?()

    # if needed as indicated by opts, create_attr_pattern also creates attribute
    pattern = create_attr_pattern(base_object,av_pair[:pattern],opts)
    ret << pattern
    # attribute_idhs are base level attribute id_handles; in contrast to
    # node_group_member_attribute_idhs, which gives non null set if attribute is on a node and node is a service_node_group
    # purpose of finding node_group_member_attribute_idhs is when explicitly setting node group attribute want to set
    # all its members to same value; only checking for component level and not node level because
    # node level attributes different for each node member
    attr_idhs = pattern.attribute_idhs
    ngm_attr_idhs = pattern.kind_of?(Type::ComponentLevel) ? pattern.node_group_member_attribute_idhs : []
    # TODO: modify; rather than checking datatype; convert attribute value, which might be in string form to right ruby data type
    # do not need to check value validity if opts[:create] (since checked already)
    unless opts[:create]
      attr_idhs.each do |attr_idh|
        unless pattern.valid_value?(value,attr_idh)
          raise ErrorUsage.new("The value (#{value.inspect}) is not of type (#{pattern.semantic_data_type(attr_idh)})")
        end
      end
    end

    all_attr_idhs = attr_idhs
    unless ngm_attr_idhs.empty?
      if opts[:create]
        raise ErrorUsage.new("Not supported creating attributes on a node group")
      end
      all_attr_idhs += ngm_attr_idhs
    end
    # one update row per resolved attribute id handle
    all_attr_idhs.each do |idh|
      attribute_rows << {:id => idh.get_id(),:value_asserted => value}.merge(attr_properties)
    end
  end

  # return if ambiguous whether component or node attribute (node and component have same name)
  return {:ambiguous => ambiguous} unless ambiguous.empty?

  # attribute_rows can have multiple rows if pattern decomposes into multiple attributes
  # it should have at least one row or there is an error
  if attribute_rows.empty?
    if opts[:create]
      raise ErrorUsage.new("Unable to create a new attribute")
    else
      raise ErrorUsage.new("The attribute specified does not match an existing attribute in the assembly")
    end
  end

  # NOTE(review): attr_ids is computed but never used; the sp_hash filter
  # below recomputes the same list
  attr_ids = attribute_rows.map{|r|r[:id]}
  attr_mh = base_object.model_handle(:attribute)

  sp_hash = {
    :cols => [:id,:group_id,:display_name,:node_node_id,:component_component_id],
    :filter => [:oneof,:id,attribute_rows.map{|a|a[:id]}]
  }
  existing_attrs = Model.get_objs(attr_mh,sp_hash,opts)
  ndx_new_vals = attribute_rows.inject(Hash.new){|h,r|h.merge(r[:id] => r[:value_asserted])}
  # raises a batched usage error if any new value is illegal
  LegalValue.raise_usage_errors?(existing_attrs,ndx_new_vals)

  SpecialProcessing::Update.handle_special_processing_attributes(existing_attrs,ndx_new_vals)
  Attribute.update_and_propagate_attributes(attr_mh,attribute_rows,opts)
  ret
end
-
-
1
# If an attribute named "assembly_wide/<pattern>" exists, returns that
# display name; otherwise returns pattern unchanged.
def self.is_assembly_node_component(attributes, pattern)
  target = "assembly_wide/#{pattern}"
  found = attributes.find { |attr| attr[:display_name].eql?(target) }
  found ? found[:display_name] : pattern
end
-
-
1
# Resolves whether av_pair[:pattern] names a node attribute or an
# assembly-wide component attribute. With opts[:component_attribute] the
# assembly-wide attribute is required (raises when absent); otherwise a name
# present in both forms is appended to +ambiguous+, and a unique match
# rewrites av_pair[:pattern] in place. No-op when opts[:node_attribute].
def self.check_ambiguity(attributes, av_pair, ambiguous, opts)
  return if opts[:node_attribute]
  pattern = av_pair[:pattern]
  assembly_wide_name = "assembly_wide/#{pattern}"

  # if user wants component-attribute we find attribute from assembly wide node
  # if not specified then check for ambiguity, if ambiguous return error message
  # else return node or component (assembly_wide) attribute
  if opts[:component_attribute]
    match = attributes.find { |attr| attr[:display_name].eql?(assembly_wide_name) }
    raise ErrorUsage.new("Service instance component attribute #{pattern} does not exist") unless match
    av_pair[:pattern] = match[:display_name]
  else
    candidates = attributes.select do |attr|
      attr[:display_name].eql?(pattern) || attr[:display_name].eql?(assembly_wide_name)
    end
    if candidates.size > 1
      ambiguous << pattern
    elsif candidates.size == 1
      av_pair[:pattern] = candidates.first[:display_name]
    end
  end
end
-
end
-
end; end
-
2
module DTK; class Attribute
-
1
class Pattern
-
1
class Assembly < self
-
1
r8_nested_require('assembly','link')
-
-
1
# Factory for assembly-scoped attribute patterns. A purely numeric attr_term
# is treated as an explicit attribute id; otherwise opts[:format] (:simple or
# :canonical_form, defaulting to Format::Default) selects the parser.
def self.create(attr_term,assembly,opts={})
  # considering attribute id to belong to any format so processing here
  return Type::ExplicitId.new(attr_term,assembly) if attr_term =~ /^[0-9]+$/

  format = opts[:format]||Format::Default
  parser =
    case format
    when :simple then Simple
    when :canonical_form then CanonicalForm
    else raise Error.new("Unexpected format (#{format})")
    end
  parser.create(attr_term,opts)
end
-
-
1
# Parses the "simple" attribute-term format: "attr", "node/attr", or
# "node/component.../attr" (where the component title may itself contain '/').
class Simple
  def self.create(attr_term,opts={})
    tokens = attr_term.split("/")
    if tokens.size == 1
      Type::AssemblyLevel.new(t(:attribute,tokens[0]))
    elsif tokens.size == 2
      Type::NodeLevel.new("#{t(:node,tokens[0])}/#{t(:attribute,tokens[1])}")
    else
      # handling in a way that can correctly parse the case where have node/cmp_type[title]/attr and title can have '/'
      # This needs to be coorinated with ComponentTitle.parse_component_display_name
      node_part = tokens.first
      attr_part = tokens.last
      cmp_part = tokens[1..-2].join('/')
      Type::ComponentLevel.new("#{t(:node,node_part)}/#{t(:component,cmp_part)}/#{t(:attribute,attr_part)}")
    end
  end
  private
  # Canonicalizes a term of the given type via Pattern::Term.
  def self.t(type,term)
    Pattern::Term.canonical_form(type,term)
  end
end
-
-
1
# Parses an already-canonical attribute term into the matching level type.
class CanonicalForm
  def self.create(attr_term,opts={})
    # can be an assembly, node or component level attribute
    case attr_term
    when /^attribute/
      Type::AssemblyLevel.new(attr_term)
    when /^node[^\/]*\/component/
      Type::ComponentLevel.new(attr_term)
    when /^node[^\/]*\/attribute/
      Type::NodeLevel.new(attr_term)
    else
      raise ErrorUsage::Parsing::Term.new(attr_term)
    end
  end
end
-
end
-
end
-
end; end
-
2
module DTK; class Attribute::Pattern
  class Assembly
    class Link < self
      r8_nested_require('link','source')
      r8_nested_require('link','target')

      # Bundles the parsed ad hoc links together with the dependent and
      # antecedent component instances of the link.
      class Info
        def initialize(parsed_adhoc_links,dep_component_instance,antec_component_instance)
          @links = parsed_adhoc_links
          @dep_component_instance = dep_component_instance
          @antec_component_instance = antec_component_instance
          # meta updates require both endpoints to be known components
          @meta_update_supported = (!dep_component_instance.nil? and !antec_component_instance.nil?)
        end
        attr_reader :links,:dep_component_instance,:antec_component_instance
        def meta_update_supported?()
          @meta_update_supported
        end
        def dep_component_template()
          @dep_component_template ||= @dep_component_instance.get_component_template_parent()
        end
        def antec_component_template()
          @antec_component_template ||= @antec_component_instance.get_component_template_parent()
        end
      end

      # Parses the target and source attribute terms against the assembly and
      # returns an Info object holding one parsed link (parent.new(...)) per
      # matching target attribute.
      # Raises ErrorUsage when the target term matches nothing, and
      # DSLNotSupported errors for unsupported link forms.
      def self.parsed_adhoc_link_info(parent,assembly,target_attr_term,source_attr_term)
        assembly_idh = assembly.id_handle()
        target_attr_pattern = Target.create_attr_pattern(assembly,target_attr_term)
        if target_attr_pattern.attribute_idhs.empty?
          raise ErrorUsage.new("No matching attribute to target term (#{target_attr_term})")
        end
        source_is_antecdent = !target_attr_pattern.is_antecedent?()
        source_attr_pattern = Source.create_attr_pattern(assembly,source_attr_term,source_is_antecdent)
        unless source_component_instance = source_attr_pattern.component_instance
          raise DSLNotSupported::LinkToNonComponent.new()
        end
        # (a redundant re-assignment of source_component_instance was removed here;
        # the unless-guard above already binds it)
        if source_component_instance[:component_type] == target_attr_pattern.component_instance[:component_type]
          raise DSLNotSupported::LinkBetweenSameComponentTypes.new(source_component_instance)
        end

        # TODO: need to do more checking and processing to include:
        # if has a relation set already and scalar conditionally reject or replace
        # if has relation set already and array, ...
        attr_info = {
          :assembly_id => assembly_idh.get_id(),
          :output_id => source_attr_pattern.attribute_idh.get_id()
        }
        if fn = source_attr_pattern.fn()
          attr_info.merge!(:function => fn)
        end

        parsed_adhoc_links = target_attr_pattern.attribute_idhs.map do |target_attr_idh|
          hash = attr_info.merge(:input_id => target_attr_idh.get_id())
          parent.new(hash,target_attr_pattern.attribute_pattern,source_attr_pattern)
        end
        dep_cmp,antec_cmp = determine_dep_and_antec_components(target_attr_pattern,source_attr_pattern)
        Info.new(parsed_adhoc_links,dep_cmp,antec_cmp)
      end

      private
      # Orders [dependent, antecedent] component instances based on which side
      # of the link the target attribute sits on.
      def self.determine_dep_and_antec_components(target_attr_pattern,source_attr_pattern)
        unless target_cmp = target_attr_pattern.component_instance()
          raise Error.new("Unexpected that target_attr_pattern.component() is nil")
        end
        source_cmp = source_attr_pattern.component_instance()

        antec_cmp,dep_cmp =
          if target_attr_pattern.is_antecedent?()
            [target_cmp,source_cmp]
          else
            [source_cmp,target_cmp]
          end
        [dep_cmp,antec_cmp]
      end

    end
  end
end; end
-
2
module DTK; class Attribute::Pattern
  class Assembly; class Link
    # for attribute relation sources
    class Source < self
      # Parses source_attr_term (either a simple '$attr' form or a
      # '...${attr}...' variable-embedded-in-text form) and wraps the resolved
      # pattern. Raises ErrorUsage::Parsing::Term when neither form matches.
      def self.create_attr_pattern(base_object,source_attr_term,source_is_antecdent)
        attr_term,fn,node_cmp_type = Simple.parse(source_attr_term) ||
          VarEmbeddedInText.parse(source_attr_term)
        unless attr_term
          raise ErrorUsage::Parsing::Term.new(source_attr_term,:source_attribute)
        end
        attr_pattern = super(base_object,attr_term)
        if node_cmp_type
          attr_pattern.set_component_instance!(node_cmp_type)
          local_or_remote = (source_is_antecdent ? :remote : :local)
          attr_pattern.local_or_remote = local_or_remote
        end

        new(attr_pattern,fn,attr_term)
      end

      attr_reader :attribute_pattern,:fn
      # source must resolve to exactly one attribute (enforced in initialize)
      def attribute_idh()
        @attribute_pattern.attribute_idhs.first
      end
      def component_instance()
        @attribute_pattern.component_instance()
      end

      private
      def initialize(attr_pattern,fn,attr_term)
        attr_idhs = attr_pattern.attribute_idhs
        if attr_idhs.empty?
          raise ErrorUsage.new("The term (#{attr_term}) does not match an attribute")
        elsif attr_idhs.size > 1
          raise ErrorUsage.new("Source attribute term must match just one, not multiple attributes")
        end
        @attribute_pattern = attr_pattern
        @fn = fn
      end

      # Parses the '$attr_term' form; returns [attr_term,fn,node_cmp_type] or
      # nil when the term does not match.
      module Simple
        def self.parse(source_term)
          if source_term =~ /^\$([a-zA-Z\-_0-9:\.\[\]\/]+$)/
            attr_term_x = $1
            # no function wrapper for the simple form
            fn = nil
            attr_term,node_cmp_type = strip_special_symbols(attr_term_x)
            [attr_term,fn,node_cmp_type]
          end
        end

        private
        # TODO: need better way to do this; there is also an ambiguity if component level attribute host_address
        # returns [attr_term,node_cmp_type] where last term can be nil
        def self.strip_special_symbols(attr_term)
          ret = [attr_term,nil]
          split = attr_term.split('/')
          if split.size == 3 and split[2] == 'host_address'
            node_part,cmp_part,attr_part = split
            ret = ["#{node_part}/#{attr_part}",cmp_part]
          end
          ret
        end
      end

      # Parses the '...${attr}...' form and builds a :var_embedded_in_text
      # function descriptor capturing the surrounding text parts.
      module VarEmbeddedInText
        def self.parse(source_term)
          # TODO: change after fix stripping off of ""
          if source_term =~ /(^[^\$]*)\$\{([^\}]+)\}(.*)/
            str_part1 = $1
            attr_term = $2
            str_part2 = $3
            fn = {
              :function => {
                :name => :var_embedded_in_text,
                :constants => {
                  :text_parts => [str_part1,str_part2]
                }
              }
            }
            node_cmp_type = nil
            [attr_term,fn,node_cmp_type]
          end
        end
      end

    end
  end; end
end; end
-
-
2
module DTK; class Attribute::Pattern
  class Assembly; class Link
    # Wraps the attribute pattern on the target (input) side of an ad hoc link.
    class Target < self
      def self.create_attr_pattern(base_object,target_attr_term)
        # resolve with the leading '*' marker removed, but keep the raw term so
        # the instance can record whether the target was marked antecedent
        resolved_pattern = super(base_object,strip_special_symbols(target_attr_term))
        new(resolved_pattern,target_attr_term)
      end

      attr_reader :attribute_pattern

      # delegated lookups on the wrapped pattern
      def attribute_idhs()
        attribute_pattern.attribute_idhs()
      end

      def component_instance()
        attribute_pattern.component_instance()
      end

      def is_antecedent?()
        @is_antecedent
      end

      private
      def initialize(attr_pattern,target_attr_term)
        @attribute_pattern = attr_pattern
        @is_antecedent = compute_if_antecedent?(target_attr_term)
      end

      # a leading '*' on the raw term marks the target as the antecedent side
      def compute_if_antecedent?(target_attr_term)
        !!(target_attr_term =~ /^\*/)
      end

      # drop the leading '*' marker before resolving the pattern
      def self.strip_special_symbols(target_attr_term)
        target_attr_term.gsub(/^\*/,'')
      end
    end
  end; end
end; end
-
2
module DTK; class Attribute
  class Pattern
    # Factory for node-scoped attribute patterns (attribute addressed relative
    # to a given node rather than an assembly).
    class Node < self
      # pattern may be a numeric attribute id, 'attr' (node level) or
      # 'component/attr' (component level)
      def self.create(pattern,node,opts={})
        if pattern =~ /^[0-9]+$/
          return Type::ExplicitId.new(pattern,node)
        end
        split_term = pattern.split("/")
        node_name = node.get_field?(:display_name)
        # NOTE(review): these fragments use 'node[...]' bracket delimiters while
        # Pattern::Term uses 'node<...>'; presumably both forms are accepted by
        # the Type classes -- verify against Term.extract_term? usage
        case split_term.size
         when 1
          Type::NodeLevel.new("node[#{node_name}]/attribute[#{split_term[0]}]")
         when 2
          Type::ComponentLevel.new("node[#{node_name}]/component[#{split_term[0]}]/attribute[#{split_term[1]}]")
         else
          raise ErrorUsage::Parsing::Term.new(pattern,:node_attribute)
        end
      end
    end
  end
end; end
-
2
module DTK; class Attribute
  class Pattern
    # Canonical term encoding: wraps a bare term as "type<term>" and extracts
    # the term back out of such fragments.
    module Term
      LDelim = '<'
      RDelim = '>'
      EscpLDelim = "\\#{LDelim}"
      EscpRDelim = "\\#{RDelim}"
      FilterFragmentRegexp = Regexp.new("[a-z]#{EscpLDelim}([^#{EscpRDelim}]+)#{EscpRDelim}")

      # e.g. canonical_form(:node, 'n1') #=> "node<n1>"
      def self.canonical_form(type,term)
        [type,LDelim,term,RDelim].join
      end

      # Returns the term inside the delimiters, or nil when the fragment does
      # not carry one.
      def self.extract_term?(canonical_form)
        match = FilterFragmentRegexp.match(canonical_form)
        match && match[1]
      end
    end
  end
end; end
-
2
module DTK; class Attribute
  class Pattern
    # Base class for the concrete attribute-pattern types (ExplicitId,
    # AssemblyLevel, NodeLevel, ComponentLevel). Holds the raw pattern string
    # and shared helpers for resolving it to nodes/components/attributes.
    class Type
      r8_nested_require('type','explicit_id')
      r8_nested_require('type','assembly_level')
      # common_node_component_level must be before node_level and component_level
      r8_nested_require('type','common_node_component_level')
      r8_nested_require('type','node_level')
      r8_nested_require('type','component_level')

      def initialize(pattern)
        @pattern = pattern
      end

      attr_writer :created
      # true when resolving the pattern created a new attribute (set by subclasses)
      def created?()
        @created
      end
      def attribute_properties()
        @attribute_properties||{}
      end
      def set_attribute_properties!(attr_properties)
        @attribute_properties = attr_properties
      end

      # Validates value against the attribute's semantic data type; vacuously
      # true when the attribute has no semantic data type.
      def valid_value?(value,attribute_idh=nil)
        attr = attribute_stack(attribute_idh)[:attribute]
        if semantic_data_type = attr[:semantic_data_type]
          # value comes as array inside string "[1, 2, 3]"; using JSON.parse to convert it to [1, 2, 3]
          value = JSON.parse(value) if semantic_data_type.eql?('array')
          SemanticDatatype.is_valid?(semantic_data_type,value)
        else
          # vacuously true
          true
        end
      end

      def semantic_data_type(attribute_idh=nil)
        attribute_stack(attribute_idh)[:attribute][:semantic_data_type]
      end

      # can be overwritten
      def node_group_member_attribute_idhs()
        Array.new
      end

      private
      attr_reader :pattern, :id

      # opts[:create] may be true, the string 'true', or an array of type symbols
      # that should be created
      def create_this_type?(opts)
        if create = opts[:create]
          create.kind_of?(TrueClass) or
            (create.kind_of?(String) and create == 'true') or
            (create.kind_of?(Array) and create.include?(type()))
        end
      end

      # Returns the stack entry matching attribute_idh; with no argument,
      # requires that exactly one stack entry exists.
      def attribute_stack(attribute_idh=nil)
        if attribute_idh
          attr_id = attribute_idh.get_id()
          unless match = @attribute_stacks.find{|as|as[:attribute].id == attr_id}
            # (fixed typo in error message: 'Unexpceted' -> 'Unexpected')
            raise Error.new("Unexpected that no match to attribute_id in attribute_stack")
          end
          match
        else
          unless @attribute_stacks.size == 1
            raise Error.new("attribute_stack() should only be called when @attribute_stacks.size == 1")
          end
          @attribute_stacks.first
        end
      end

      # parent will be node_idh or assembly_idh
      def ret_matching_nodes(parent_idh)
        if parent_idh[:model_name] == :node
          return [parent_idh]
        end
        filter = [:eq, :assembly_id, parent_idh.get_id()]
        if node_filter = ret_filter(pattern,:node)
          filter = [:and, filter, node_filter]
        end
        sp_hash = {
          :cols => [:id,:group_id,:display_name],
          :filter => filter
        }
        Model.get_objs(parent_idh.createMH(:node),sp_hash)
      end

      def ret_matching_components(nodes,cmp_fragment)
        filter = [:oneof, :node_node_id, nodes.map{|r|r.id()}]
        if cmp_filter = ret_filter(cmp_fragment,:component)
          filter = [:and, filter, cmp_filter]
        end
        sp_hash = {
          :cols => [:id,:group_id,:display_name,:component_type,:node_node_id,:ancestor_id],
          :filter => filter
        }
        cmp_mh = nodes.first.model_handle(:component)
        Model.get_objs(cmp_mh,sp_hash).map{|r|Component::Instance.create_from_component(r)}
      end

      # type is :node or :component (keys of TypeToIdField)
      def ret_matching_attributes(type,idhs,attr_fragment)
        filter = [:oneof, TypeToIdField[type], idhs.map{|idh|idh.get_id()}]
        if attr_filter = ret_filter(attr_fragment,:attribute)
          filter = [:and, filter, attr_filter]
        end
        sp_hash = {
          :cols => [:id,:group_id,:display_name,:external_ref,:semantic_data_type,TypeToIdField[type]],
          :filter => filter
        }
        sample_idh = idhs.first
        Model.get_objs(sample_idh.createMH(:attribute),sp_hash)
      end
      TypeToIdField = {
        :component => :component_component_id,
        :node => :node_node_id
      }

      # Builds a display-name equality filter from a pattern fragment; returns
      # nil (meaning 'match all') when the fragment carries no term or the term
      # is the wildcard '*'.
      def ret_filter(fragment,type)
        unless term = Pattern::Term.extract_term?(fragment)
          return nil #without qualification means all (no filter)
        end
        if term == "*"
          return nil
        end
        display_name = (type == :component ? ::DTK::Component::Instance.display_name_from_user_friendly_name(term) : term)
        if type == :node and ::DTK::Node.legal_display_name?(display_name)
          [:eq,:display_name,display_name]
        elsif type == :component and ::DTK::Component::Instance.legal_display_name?(display_name)
          [:eq,:display_name,display_name]
        elsif type == :attribute and Attribute.legal_display_name?(display_name)
          [:eq,:display_name,display_name]
        else
          # TODO: check why have :component_segment
          raise ErrorUsage::Parsing::Term.new(term,:component_segment)
        end
      end

    end
  end
end; end
-
-
2
module DTK; class Attribute
  class Pattern; class Type
    # Pattern type for assembly-level attributes (no node/component part).
    class AssemblyLevel < self
      def type()
        :assembly_level
      end

      def attribute_idhs()
        @attribute_stacks.map{|attr|attr[:attribute].id_handle()}
      end

      # assembly-level attributes have no owning component
      def component_instance()
        nil
      end

      # Resolves the pattern against the assembly, optionally creating the
      # attribute when opts[:create] requests it; fills in @attribute_stacks
      # and returns self.
      def set_parent_and_attributes!(assembly_idh,opts={})
        attributes = ret_matching_attributes(:component,[assembly_idh],pattern)
        # if does not exist then create the attribute if create option is true
        # if exists and create flag exists we just assign it new value
        if attributes.empty? and create_this_type?(opts)
          af = ret_filter(pattern,:attribute)
          # attribute must have simple form
          unless af.kind_of?(Array) and af.size == 3 and af[0..1] == [:eq,:display_name]
            raise Error.new("cannot create new attribute from attribute pattern #{pattern}")
          end
          attr_properties = opts[:attribute_properties]||Hash.new
          unless attr_properties.empty?
            # (fixed typos in the two error messages below: 'teh' -> 'the',
            # 'leveal' -> 'level')
            if attr_properties[:dynamic]
              raise ErrorUsage.new("Illegal to include the :dynamic option on an assembly level attribute")
            elsif attr_properties[:required]
              raise Error.new("The option :required not yet supported on assembly level attributes")
            end
            set_attribute_properties!(attr_properties)
          end
          @created = true
          field_def = {"display_name" => af[2]}
          attribute_idhs = assembly_idh.create_object().create_or_modify_field_def(field_def)
          attributes = attribute_idhs.map do |idh|
            attr = idh.create_object()
            attr.update_object!(:display_name)
            attr
          end
        end
        assembly = assembly_idh.create_object()
        assembly.update_object!(:display_name)
        @attribute_stacks = attributes.map do |attr|
          {
            :assembly => assembly,
            :attribute => attr
          }
        end
        self
      end
    end
  end; end
end; end
-
2
module DTK; class Attribute
  class Pattern; class Type
    # Shared accessors/helpers for NodeLevel and ComponentLevel, which both
    # keep resolved results in @attribute_stacks entries of the form
    # {:attribute => ..., :node => ..., (:component => ...)}.
    module CommonNodeComponentLevel
      def attribute_name()
        attribute_stack()[:attribute][:display_name]
      end
      def attribute_id()
        attribute_stack()[:attribute].id()
      end
      def component_instance()
        attribute_stack()[:component]
      end
      def component_instances()
        @attribute_stacks.map{|as|as[:component]}.compact
      end
      def node()
        attribute_stack()[:node]
      end

      def attribute_idhs()
        @attribute_stacks.map{|r|r[:attribute].id_handle()}
      end
      # For node-group nodes, also returns the id handles of the per-member
      # attributes derived from the group attribute.
      def node_group_member_attribute_idhs()
        ret = Array.new
        @attribute_stacks.each do |r|
          if r[:node].is_node_group?()
            ret += attribute_idhs_on_service_node_group(r[:attribute])
          end
        end
        ret
      end

      private
      # Creates the pattern's attribute under each parent in attr_parents and
      # returns the newly created attribute objects fetched with their columns.
      def create_attributes(attr_parents)
        attribute_idhs = Array.new
        # field defs expect string keys
        attr_properties = attribute_properties().inject(Hash.new){|h,(k,v)|h.merge(k.to_s => v)}
        field_def =
          {'display_name' => pattern_attribute_name()}.merge(attr_properties)
        attr_parents.each do |attr_parent|
          attribute_idhs += Attribute.create_or_modify_field_def(attr_parent,field_def)
        end

        return attribute_idhs if attribute_idhs.empty?

        # TODO: can make more efficient by having create_or_modify_field_def return object with cols, rather than id_handles
        sp_hash = {
          :cols => [:id,:group_id,:display_name,:description,:component_component_id,:data_type,:semantic_type,:required,:dynamic,:external_ref,:semantic_data_type],
          :filter => [:oneof,:id,attribute_idhs.map{|idh|idh.get_id()}]
        }
        attr_mh = attribute_idhs.first.createMH()
        Model.get_objs(attr_mh,sp_hash)
      end

      # member attributes carry the group attribute as their ancestor
      def attribute_idhs_on_service_node_group(node_group_attribute)
        sp_hash = {
          :cols => [:id,:display_name,:group_id],
          :filter => [:eq,:ancestor_id,node_group_attribute.id()]
        }
        attr_mh = node_group_attribute.model_handle()
        Model.get_objs(attr_mh,sp_hash).map{|r|r.id_handle()}
      end

      def pattern_attribute_name()
        first_name_in_fragment(pattern_attribute_fragment())
      end

      # extracts the term inside the first <...> delimiters of a fragment
      def first_name_in_fragment(fragment)
        fragment =~ NameInFragmentRegexp
        $1
      end
      NameInFragmentRegexp = /[^<]*<([^>]*)>/
    end
  end; end
end; end
-
-
2
module DTK; class Attribute
  class Pattern; class Type
    # Pattern type for component-level attributes (node/component/attribute).
    class ComponentLevel < self
      include CommonNodeComponentLevel

      # Creates the attribute on a component template; when opts[:update_dsl]
      # is given (must carry :module_branch) the component DSL is incrementally
      # updated as well. Returns the new attribute's id handle.
      def create_attribute_on_template(cmp_template,opts={})
        new_attr = create_attributes([cmp_template]).first
        if update_dsl = opts[:update_dsl]
          unless module_branch = update_dsl[:module_branch]
            raise Error.new("If update_dsl is specified then module_branch must be provided")
          end
          module_branch.incrementally_update_component_dsl([new_attr],:component_template => cmp_template)
        end
        new_attr.id_handle()
      end

      def type()
        :component_level
      end

      def match_attribute_mapping_endpoint?(am_endpoint)
        am_endpoint[:type] == 'component_attribute' and
          am_endpoint[:component_type] == component_instance()[:component_type] and
          am_endpoint[:attribute_name] == attribute_name()
      end

      def am_serialized_form()
        "#{component_instance()[:component_type]}.#{attribute_name()}"
      end

      # Resolves node, component, and attribute fragments of the pattern
      # against parent_idh, optionally creating missing attributes; fills in
      # @attribute_stacks and returns self.
      def set_parent_and_attributes!(parent_idh,opts={})
        ret = self
        @attribute_stacks = Array.new
        ndx_nodes = ret_matching_nodes(parent_idh).inject(Hash.new){|h,r|h.merge(r[:id] => r)}
        if ndx_nodes.empty?
          # only an error when the caller asked to create the attribute
          if create_this_type?(opts)
            raise ErrorUsage.new("Node name (#{pattern_node_name()}) in attribute does not match an existing node")
          end
          return ret
        end

        cmp_fragment = pattern_component_fragment()
        ndx_cmps = ret_matching_components(ndx_nodes.values,cmp_fragment).inject(Hash.new){|h,r|h.merge(r[:id] => r)}
        if ndx_cmps.empty?
          if create_this_type?(opts)
            raise ErrorUsage.new("Component name (#{pattern_component_name()}) in attribute does not match an existing component in node (#{pattern_node_name()})")
          end
          return ret
        end

        attr_fragment = pattern_attribute_fragment()
        attrs = ret_matching_attributes(:component,ndx_cmps.values.map{|r|r.id_handle()},attr_fragment)
        if attrs.empty? and create_this_type?(opts)
          @created = true
          set_attribute_properties!(opts[:attribute_properties]||{})
          attrs = create_attributes(ndx_cmps.values)
        end
        @attribute_stacks = attrs.map do |attr|
          cmp = ndx_cmps[attr[:component_component_id]]
          # TODO: this should be done more internally
          fill_in_external_ref?(attr,cmp)
          {
            :attribute => attr,
            :component => cmp,
            :node => ndx_nodes[cmp[:node_node_id]]
          }
        end
        ret
      end
      private
      # Backfills a puppet-style external_ref on attributes that lack one.
      def fill_in_external_ref?(attr,component)
        unless attr.get_field?(:external_ref)
          component_type = component.get_field?(:component_type)
          attr_name = attr.get_field?(:display_name)
          external_ref = attr[:external_ref] = {
            # TODO: hard coded and not centralized logic
            :type => 'puppet_attribute',
            :path => "node[#{component_type}][#{attr_name}]"
          }
          attr.update({:external_ref => external_ref},:convert => true)
        end
        attr
      end

      def pattern_node_name()
        Pattern.node_name(pattern())
      end
      def pattern_component_fragment()
        Pattern.component_fragment(pattern())
      end
      def pattern_attribute_fragment()
        Pattern.attribute_fragment(pattern())
      end
      def pattern_component_name()
        first_name_in_fragment(pattern_component_fragment())
      end
    end
  end; end
end; end
-
-
2
module DTK; class Attribute
  class Pattern; class Type
    # Pattern type used when the attribute term is a numeric id.
    class ExplicitId < self
      # parent_obj must be a ::DTK::Node or ::DTK::Assembly; validates that the
      # id actually names one of the parent's attributes.
      def initialize(pattern,parent_obj)
        super(pattern)
        @id = pattern.to_i
        if parent_obj.kind_of?(::DTK::Node)
          raise_error_if_not_node_attr_id(@id,parent_obj)
        elsif parent_obj.kind_of?(::DTK::Assembly)
          raise_error_if_not_assembly_attr_id(@id,parent_obj)
        else
          # interpolation calls to_s; removed redundant explicit .to_s
          raise Error.new("Unexpected parent object type (#{parent_obj.class})")
        end
      end

      def type()
        :explicit_id
      end

      attr_reader :attribute_idhs

      # The id is already known, so resolution just wraps it in an id handle.
      def set_parent_and_attributes!(parent_idh,opts={})
        @attribute_idhs = [parent_idh.createIDH(:model_name => :attribute, :id => id())]
        self
      end

      def valid_value?(value,attribute_idh=nil)
        # TODO: not testing yet valid_value? for explicit_id type
        # vacuously true
        true
      end

      private
      def raise_error_if_not_node_attr_id(attr_id,node)
        unless node.get_node_and_component_attributes().find{|r|r[:id] == attr_id}
          raise ErrorUsage.new("Illegal attribute id (#{attr_id}) for node")
        end
      end
      def raise_error_if_not_assembly_attr_id(attr_id,assembly)
        unless assembly.get_attributes_all_levels().find{|r|r[:id] == attr_id}
          raise ErrorUsage.new("Illegal attribute id (#{attr_id}) for assembly")
        end
      end
    end
  end; end
end; end
-
2
module DTK; class Attribute
  class Pattern; class Type
    # Pattern type for node-level attributes (node<...>/attribute<...>).
    class NodeLevel < self
      include CommonNodeComponentLevel

      def type()
        :node_level
      end

      def match_attribute_mapping_endpoint?(am_endpoint)
        am_endpoint[:type] == 'node_attribute' and
          attr_name_normalize(am_endpoint[:attribute_name]) == attr_name_normalize(attribute_name())
      end

      def am_serialized_form()
        "#{local_or_remote()}_node.#{attribute_name()}"
      end

      # Resolves the pattern against the parent's nodes, optionally creating
      # missing attributes; fills in @attribute_stacks and returns self.
      def set_parent_and_attributes!(parent_idh,opts={})
        ret = self
        @attribute_stacks = Array.new
        ndx_nodes = ret_matching_nodes(parent_idh).inject(Hash.new){|h,r|h.merge(r[:id] => r)}
        return ret if ndx_nodes.empty?

        pattern =~ /^node[^\/]*\/(attribute.+$)/
        attr_fragment = attr_name_special_processing($1)
        attrs = ret_matching_attributes(:node,ndx_nodes.values.map{|r|r.id_handle()},attr_fragment)
        if attrs.empty? and create_this_type?(opts)
          @created = true
          set_attribute_properties!(opts[:attribute_properties]||{})
          attrs = create_attributes(ndx_nodes.values)
        end

        @attribute_stacks = attrs.map do |attr|
          {
            :attribute => attr,
            :node => ndx_nodes[attr[:node_node_id]]
          }
        end
        ret
      end

      # Looks up the single component of component_type on this pattern's node
      # and records it in the attribute stack.
      def set_component_instance!(component_type)
        cmp_fragment = Term.canonical_form(:component,component_type)
        matching_cmps = ret_matching_components([node()],cmp_fragment)
        if matching_cmps.empty?
          raise ErrorUsage.new("Illegal component reference (#{component_type})")
        elsif matching_cmps.size > 1
          # (fixed typo: 'wil' -> 'will')
          raise Error.new("Unexpected that ret_matching_components will return more than 1 match")
        end
        attribute_stack()[:component] = matching_cmps.first
      end

      attr_writer :local_or_remote

      private
      def pattern_attribute_fragment()
        pattern() =~ AttrRegexp
        $1
      end
      AttrRegexp = /^node[^\/]*\/(attribute.+$)/

      def local_or_remote()
        unless @local_or_remote
          # (fixed typo: 'caleld' -> 'called')
          raise Error.new("local_or_remote() is called when @local_or_remote not set")
        end
        @local_or_remote
      end

      # host_addresses_ipv4 displays/serializes as host_address
      def attr_name_normalize(attr_name)
        if attr_name == 'host_addresses_ipv4'
          'host_address'
        else
          attr_name
        end
      end
      def attr_name_special_processing(attr_fragment)
        # TODO: make this obtained from shared logic
        if attr_fragment == Pattern::Term.canonical_form(:attribute,'host_address')
          Pattern::Term.canonical_form(:attribute,'host_addresses_ipv4')
        else
          attr_fragment
        end
      end

    end
  end; end
end; end
-
1
module DTK
  class Attribute
    # Instance-side print-form support; mixed into attribute objects.
    module PrintFormMixin
      # Returns a display hash for this attribute (refreshes the columns
      # PrintForm needs first).
      def print_form(opts=Opts.new)
        update_object!(*PrintForm::UpdateCols)
        PrintForm.print_form(self,opts)
      end
    end

    module PrintFormClassMixin
      # Display hashes for a list of attributes; when :attribute_links detail
      # is requested, augments each with link info (requires opts[:assembly]).
      def print_form(raw_attrs,opts=Opts.new)
        ret = raw_attrs.map{|a|a.print_form(opts)}
        if opts.array(:detail_to_include).include?(:attribute_links)
          unless assembly = opts[:assembly]
            raise Error.new("Unexpected to have opts[:assembly] nil")
          end
          PrintForm.augment_with_attribute_links!(ret,assembly,raw_attrs)
        end
        ret
      end

    end

    module Format
      # possible values are [:canonical,:simple]
      Default = :simple
    end

    # Renders an augmented attribute into a display hash: name, prefixed
    # display name, datatype, description, and (possibly truncated) value.
    class PrintForm
      def self.print_form(aug_attr,opts=Opts.new)
        new(aug_attr,opts).print_form()
      end

      def print_form()
        attr_name = attr_name_special_processing() || attr_name_default()

        attr_info = {
          :name => attr_name,
          :display_name => "#{@display_name_prefix}#{attr_name}",
          :datatype => datatype_print_form(),
          :description => @aug_attr[:description]||@aug_attr[:display_name]
        }
        value = value_print_form()
        unless value.nil?()
          if @truncate_attribute_value
            # option may be an integer size or just truthy (use default size)
            truncate_size = (@truncate_attribute_value.kind_of?(Fixnum) ? @truncate_attribute_value : DefaultTruncateSize)
            if value.kind_of?(String) and value.size > truncate_size
              value = "#{value[0..truncate_size-1]} #{TruncateSymbols}"
            end
          end
          attr_info.merge!(:value => value)
        end
        @aug_attr.hash_subset(*PrintForm::UnchangedDisplayCols).merge(attr_info)
      end
      UnchangedDisplayCols = [:id,:required]
      UpdateCols = UnchangedDisplayCols + [:description,:display_name,:data_type,:value_derived,:value_asserted]
      DefaultTruncateSize = 45
      TruncateSymbols = '...'

      # Adds :linked_to / :linked_to_display_form entries to each print-form
      # hash in ret, based on the assembly's attribute mappings. Mutates and
      # returns ret.
      def self.augment_with_attribute_links!(ret,assembly,raw_attributes)
        ndx_attrs = raw_attributes.inject(Hash.new){|h,a|h.merge(a[:id] => a)}
        ndx_attr_mappings = Hash.new
        assembly.get_augmented_attribute_mappings().each do |r|
          ndx = r[:input_id]
          pntr = ndx_attr_mappings[ndx] ||= Array.new
          output_id = r[:output_id]
          # dedup on output id
          unless pntr.find{|m|m[:id] == output_id}
            opts = Opts.new
            if output_index_map = r[:output_index_map]
              opts.merge!(:index_map => output_index_map)
            end
            ndx_attr_mappings[ndx] << r[:output].print_form(opts)
          end
        end
        ret.each do |r|
          attr_id = r[:id]
          if linked_to_obj = ndx_attr_mappings[attr_id]
            r.merge!(:linked_to => linked_to_obj,:linked_to_display_form => linked_to_display_form(linked_to_obj))
          else
            # attributes bound to puppet default variables get a marker form
            ext_ref = (ndx_attrs[attr_id]||{})[:external_ref]||{}
            if ext_ref[:default_variable] and ext_ref[:type] == 'puppet_attribute'
              r.merge!(:linked_to_display_form => LinkedToPuppetHeader)
            end
          end
        end
        ret
      end

      private
      def initialize(aug_attr,opts=Opts.new)
        @aug_attr = aug_attr #needs to be done first
        @display_name_prefix = opts[:display_name_prefix] || display_name_prefix(opts.slice(:format, :with_assembly_wide_node).merge(:level => opts[:level]||find_level()))
        @index_map = opts[:index_map]
        @truncate_attribute_value = opts[:truncate_attribute_values]
        @raw_attribute_value = opts[:raw_attribute_value]
        @mark_unset_required = opts[:mark_unset_required]
      end

      def self.linked_to_display_form(linked_to_obj)
        linked_to_obj.map{|r|r[:display_name]}.join(', ')
      end
      LinkedToPuppetHeader = 'external_ref(puppet_header)'

      def attr_name_default()
        index_map_string = (@index_map ? @index_map.inspect() : "")
        "#{@aug_attr[:display_name]}#{index_map_string}"
      end
      # special case: the first ipv4 host address displays as 'host_address'
      def attr_name_special_processing()
        if @aug_attr[:semantic_type_summary] == "host_address_ipv4" and @index_map == [0]
          "host_address"
        end
      end

      # Computes the '$node/$component/' style prefix for the display name,
      # based on the attribute's level and the requested format.
      def display_name_prefix(opts=Opts.new)
        level = opts.required(:level)
        format = DisplayNamePrefixFormats[opts[:format]||Format::Default][level]
        case level
         when :assembly
          format
         when :node
          format.gsub(/\$node/,node()[:display_name])
         when :component
          node = node()
          # the assembly_wide pseudo-node is hidden unless explicitly requested
          if node[:type].eql?('assembly_wide') && !opts[:with_assembly_wide_node]
            format.gsub(/\$node\//,'').gsub(/\$component/,component().display_name_print_form())
          else
            format.gsub(/\$node/,node[:display_name]).gsub(/\$component/,component().display_name_print_form())
          end
        end
      end

      DisplayNamePrefixFormats = {
        :simple => {
          :assembly => "",
          :node => "$node/",
          :component => "$node/$component/"
        },
        :canonical => {
          :assembly => "",
          :node => "node[$node]/",
          :component => "node[$node]/cmp[$component]/"
        }
      }

      # Renders the attribute value for display; recurses for arrays/hashes.
      # Unset values may render as *REQUIRED* (top level) or nil (nested).
      def value_print_form(opts={})
        value = (opts.has_key?(:nested_val) ? opts[:nested_val] : @aug_attr[:attribute_value])
        if value.nil?
          ret =
            if opts[:nested]
              PrintValueNil
            else
              if @mark_unset_required and @aug_attr[:required]
                # dont mark as required input ports since they will be propagated
                unless @aug_attr[:is_port] and @aug_attr[:port_type_asserted] == 'input'
                  PrintValueUnsetRequired
                end
              end
            end
          return ret
        end

        if @raw_attribute_value
          return SemanticDatatype.convert_to_internal_form(@aug_attr[:semantic_data_type],value)
        end

        if value.kind_of?(Array)
          "[#{value.map{|el|value_print_form(:nested_val=>el,:nested=>true)}.join(', ')}]"
          # value.inspect
        elsif value.kind_of?(Hash)
          comma = ''
          internal = value.inject(String.new) do |s,(k,val)|
            item = s + comma
            comma = ', '
            el = value_print_form(:nested_val=>val,:nested=>true)
            "#{item}#{k}=>#{el}"
          end
          "{#{internal}}"
          # value.inspect
        elsif [String,Fixnum,TrueClass,FalseClass].find{|t|value.kind_of?(t)}
          # scalars print as-is
          value
        else
          value.inspect
        end
      end
      PrintValueUnsetRequired = '*REQUIRED*'
      PrintValueNil = 'nil'

      def datatype_print_form()
        # TODO: until will populate node/os_identifier attribute with the node_template_type datatype
        if @aug_attr[:display_name] == 'os_identifier' and @aug_attr[:node]
          return 'node_template_type'
        end
        @aug_attr[:semantic_data_type]||@aug_attr[:data_type]
      end

      def node()
        @aug_attr[:node]
      end
      def component()
        @aug_attr[:component]||@aug_attr[:nested_component]
      end
      # :component > :node > :assembly, based on what the augmented attribute carries
      def find_level()
        if node()
          component() ? :component : :node
        else
          :assembly
        end
      end
    end
  end
end
-
2
module DTK; class Attribute
-
1
module PropagateChangesClassMixin
-
# assume attribute_rows all have :value_asserted or all have :value_derived
-
1
def update_and_propagate_attributes(attr_mh,attribute_rows,opts={})
-
ret = Array.new
-
return ret if attribute_rows.empty?
-
sample = attribute_rows.first
-
val_field = (sample.has_key?(:value_asserted) ? :value_asserted : :value_derived)
-
old_val_field = "old_#{val_field}".to_sym
-
-
attr_idhs = attribute_rows.map{|r|attr_mh.createIDH(:id => r[:id])}
-
ndx_existing_values = get_objs_in_set(attr_idhs,:columns => [:id,val_field]).inject({}) do |h,r|
-
h.merge(r[:id] => r)
-
end
-
-
# prune attributes change paths for attrribues taht have not changed
-
ndx_ch_attr_info = Hash.new
-
attribute_rows.each do |r|
-
id = r[:id]
-
if ndx_existing_values[id].nil?
-
ndx_ch_attr_info[id] = Aux::hash_subset(r,[:id,val_field])
-
next
-
end
-
-
new_val = r[val_field]
-
existing_val = ndx_existing_values[id][val_field]
-
if r[:change_paths]
-
r[:change_paths].each do |path|
-
next if unravelled_value(new_val,path) == unravelled_value(existing_val,path)
-
ndx_ch_attr_info[id] ||= Aux::hash_subset(r,[:id,val_field]).merge(:change_paths => Array.new,old_val_field => existing_val)
-
ndx_ch_attr_info[id][:change_paths] << path
-
end
-
elsif not (existing_val == new_val)
-
ndx_ch_attr_info[id] = Aux::hash_subset(r,[:id,val_field]).merge(old_val_field => existing_val)
-
end
-
end
-
-
return ret if ndx_ch_attr_info.empty?
-
changed_attrs_info = ndx_ch_attr_info.values
-
-
update_rows = changed_attrs_info.map do |r|
-
row = Aux::hash_subset(r,[:id,val_field])
-
row.merge!(:is_instance_value => (val_field == :value_asserted))
-
row
-
end
-
-
# make actual changes in database
-
opts_update = {:partial_value => true}.merge(Aux.hash_subset(opts,:partial_value))
-
update_from_rows(attr_mh,update_rows,opts_update)
-
-
propagate_and_optionally_add_state_changes(attr_mh,changed_attrs_info,opts)
-
end
-
-
1
def update_and_propagate_dynamic_attributes(attr_mh,dyn_attr_val_info)
-
attribute_rows = dyn_attr_val_info.map{|r|{:id => r[:id], dynamic_attribute_value_field() => r[:attribute_value]}}
-
update_and_propagate_attributes(attr_mh,attribute_rows,:add_state_changes => false)
-
end
-
-
1
def propagate_and_optionally_add_state_changes(attr_mh,changed_attrs_info,opts={})
-
return Array.new if changed_attrs_info.empty?
-
# default is to add state changes
-
add_state_changes = ((not opts.has_key?(:add_state_changes)) or opts[:add_state_changes])
-
-
change_hashes_to_propagate = create_change_hashes(attr_mh,changed_attrs_info,opts)
-
direct_scs = (add_state_changes ? StateChange.create_pending_change_items(change_hashes_to_propagate) : Array.new)
-
ndx_nested_change_hashes = propagate_changes(change_hashes_to_propagate)
-
indirect_scs = (add_state_changes ? StateChange.create_pending_change_items(ndx_nested_change_hashes.values) : Array.new)
-
direct_scs + indirect_scs
-
end
-
-
1
# Pushes changed attribute values through attribute links to downstream
# (output) attributes. change_hashes rows carry :new_item (attribute
# id-handle), :parent, and :change (see create_change_hashes).
# Returns whatever AttributeLink.propagate returns (empty Hash when there is
# nothing to propagate).
def propagate_changes(change_hashes)
  ret = Hash.new
  return ret if change_hashes.empty?
  output_attr_idhs = change_hashes.map{|ch|ch[:new_item]}
  scalar_attrs = [:id,:value_asserted,:value_derived,:semantic_type]
  attr_link_rows = get_objs_in_set(output_attr_idhs,:columns => scalar_attrs + [:linked_attributes])

  # dont propagate to attributes with asserted values TODO: push this restriction into search pattern
  # NOTE(review): :input_attribute is presumably spliced into each row by the
  # :linked_attributes virtual column -- confirm against get_objs_in_set.
  attr_link_rows.reject!{|r|(r[:input_attribute]||{})[:value_asserted]}
  return ret if attr_link_rows.empty?

  # output_id__parent_idhs used to splice in parent_id (if it exists)
  output_id__parent_idhs = change_hashes.inject({}) do |h,ch|
    h.merge(ch[:new_item].get_id() => ch[:parent])
  end

  attrs_links_to_update = attr_link_rows.map do |r|
    output_attr = Aux::hash_subset(r,scalar_attrs)
    {
      :input_attribute => r[:input_attribute],
      :output_attribute => output_attr,
      :attribute_link => r[:attribute_link],
      :parent_idh => output_id__parent_idhs[output_attr[:id]]
    }
  end
  attr_mh = output_attr_idhs.first.createMH() #first is just a sample
  AttributeLink.propagate(attr_mh,attrs_links_to_update)
end
-
-
1
# Resets the derived values of the given attributes and, via propagation,
# their dependents. Returns an empty array when there is nothing to clear.
def clear_dynamic_attributes_and_their_dependents(attrs, opts = {})
  return [] if attrs.empty?

  value_field = dynamic_attribute_value_field()
  rows = attrs.map do |attr|
    { :id => attr[:id], value_field => dynamic_attribute_clear_value(attr) }
  end
  update_and_propagate_attributes(attrs.first.model_handle(), rows, opts)
end
-
1
private
-
-
1
# Builds the change hashes consumed by propagation / state-change creation.
# Each returned hash has :new_item (attribute id-handle), :parent (datacenter
# id-handle, or nil when state changes are disabled), :change with :old/:new
# values, and :change_paths when the source row has them.
# Fixed: removed dead local `ret = Array.new`, which was assigned but never
# used (the method returns the map result).
def create_change_hashes(attr_mh,changed_attrs_info,opts={})
  # use sample attribute to find containing datacenter
  sample_attr_idh = attr_mh.createIDH(:id => changed_attrs_info.first[:id])

  add_state_changes = ((not opts.has_key?(:add_state_changes)) or opts[:add_state_changes])
  # TODO: anymore efficieny way do do this; can pass datacenter in fn
  # TODO: when in nested call want to use passed in parent
  parent_idh = (add_state_changes ? sample_attr_idh.get_top_container_id_handle(:datacenter) : nil)
  changed_attrs_info.map do |r|
    hash = {
      :new_item => attr_mh.createIDH(:id => r[:id]),
      :parent => parent_idh,
      :change => {
        # TODO: check why before it had a json encode on values
        # think can then just remove below
        # :old => json_form(r[old_val_index]),
        # :new => json_form(r[val_index])
        :old => r[:old_value_asserted] || r[:old_value_derived],
        :new => r[:value_asserted] || r[:value_derived]
      }
    }
    hash.merge!(:change_paths => r[:change_paths]) if r[:change_paths]
    hash
  end
end
-
-
1
# Name of the model field that stores dynamically computed (derived) values.
def dynamic_attribute_value_field()
  :value_derived
end
-
1
# "Cleared" value for a dynamic attribute: nil for scalars, an array of nils
# of the same length for array-valued attributes.
def dynamic_attribute_clear_value(attr)
  if attr.kind_of?(Array)
    Array.new(attr.size)
  else
    nil
  end
end
-
end
-
end; end
-
-
# TODO: initially enterred through the simple dsl; may then put in model that uses db persistence, but caches this
-
1
module DTK
  class Attribute
    # A named semantic datatype (e.g. :port, :boolean) declared through the
    # DSL in SemanticDatatypeMixin. Each instance carries a base datatype, an
    # optional validation proc, and an optional internal-form conversion proc.
    class SemanticDatatype
      r8_nested_require('semantic_datatype','dsl_builder')
      extend SemanticDatatypeClassMixin
      include SemanticDatatypeMixin

      attr_reader :datatype
      def initialize(name)
        @name = name.to_s
        @datatype = nil
        @parent = nil
        @validation_proc = nil
      end
      # this must be placed here
      r8_nested_require('semantic_datatype','asserted_datatypes')

      # Datatype used when none is explicitly given.
      def self.default()
        DefaultDatatype
      end
      DefaultDatatype = :string

      # Validates value against semantic_data_type, raising ErrorUsage when it
      # does not match, and returns the value converted to internal form.
      # nil passes through unchanged.
      def self.convert_and_raise_error_if_not_valid(semantic_data_type,value,opts={})
        if value.nil?
          return nil
        end
        unless is_valid?(semantic_data_type,value)
          if opts[:attribute_name]
            raise ErrorUsage.new("Attribute (#{opts[:attribute_name]}) has default value (#{value.inspect}) that does not match its type (#{semantic_data_type})")
          else
            raise ErrorUsage.new("The attribute value (#{value.inspect}) does not match its type (#{semantic_data_type})")
          end
        end
        convert_to_internal_form(semantic_data_type,value)
      end

      # nil is treated as valid for every type.
      def self.is_valid?(semantic_data_type,value)
        value.nil? or lookup(semantic_data_type).is_valid?(value)
      end

      def self.datatype(semantic_data_type)
        lookup(semantic_data_type).datatype()
      end

      # Everything is valid when no validation proc was declared.
      def is_valid?(value)
        @validation_proc.nil? or @validation_proc.call(value)
      end

      # Whether term names a declared semantic datatype.
      def self.isa?(term)
        all_types().has_key?(term.to_sym)
      end

      def self.convert_to_internal_form(semantic_data_type,value)
        if semantic_data_type
          lookup(semantic_data_type).convert_to_internal_form(value)
        else
          value
        end
      end
      # Identity when no internal-form proc was declared.
      def convert_to_internal_form(value)
        @internal_form_proc ? @internal_form_proc.call(value) : value
      end

      private
      # NOTE(review): `private` has no effect on methods defined with
      # `def self.`; lookup remains publicly callable.
      # Raises ErrorUsage when semantic_data_type is not a declared type.
      def self.lookup(semantic_data_type)
        unless ret = all_types()[semantic_data_type.to_sym]
          raise ErrorUsage.new("Illegal datatype (#{semantic_data_type})")
        end
        ret
      end
    end
  end
end
-
-
# TODO: modify so that types like port can call tehir parents methods
-
2
module DTK; class Attribute
  class SemanticDatatype
    # Declarations of the built-in semantic datatypes via the Type DSL.
    # Composite JSON-backed types:
    Type :object do
      basetype :json
    end
    Type :array do
      basetype :json
      validation lambda{|v|v.kind_of?(Array)}
    end
    Type :hash do
      basetype :json
      validation lambda{|v|v.kind_of?(Hash)}
    end
    # NOTE(review): /^[0-9]+$/ anchors per line, so a multiline string such as
    # "1\nx" would validate -- confirm whether \A...\z anchoring is intended.
    Type :port do
      basetype :integer
      validation /^[0-9]+$/
      internal_form lambda{|v|v.to_i}
    end
    Type :log_file do
      basetype :string
      validation /.*/ #so checks that it is scalar
    end

    Type :node_template_type do
      basetype :string
      validation /.*/ #TODO: put validation in here; may need a handle in appropriate place in object model to see what is valid
    end

    # base types
    Type :string do
      basetype :string
      validation /.*/ #so checks that it is scalar
    end
    Type :integer do
      basetype :integer
      validation /^[0-9]+$/
      internal_form lambda{|v|v.to_i}
    end
    Type :boolean do
      basetype :boolean
      # NOTE(review): /true|false/ is unanchored, so any string containing
      # "true" or "false" (e.g. "untrue") validates -- confirm intent.
      validation /true|false/
      internal_form lambda{|v|
        if v.kind_of?(TrueClass) or v == 'true'
          true
        elsif v.kind_of?(FalseClass) or v == 'false'
          false
        else
          raise Error.new("Bad boolean type (#{v.inspect})") #this should not be reached since v is validated before this fn called
        end
      }
    end
    # TODO: may deprecate
    Type :json do
      basetype :json
    end
  end
end; end
-
-
1
require 'docile'
-
1
module DTK
  class Attribute
    class SemanticDatatype
      # Class-side half of the datatype DSL: a registry of declared types plus
      # the Type() declaration entry point (evaluated through Docile).
      module SemanticDatatypeClassMixin
        # Registry of declared types indexed by symbol name; returns an empty
        # hash before any Type() call.
        def all_types()
          @cache || Hash.new
        end
        # Declares a semantic datatype: evaluates the block against a new
        # instance via the Docile DSL and registers the built object.
        def Type(name,&block)
          el = ::Docile.dsl_eval(new(name),&block).build
          @cache ||= Hash.new
          @cache.merge!(name.to_sym => el)
        end
      end

      # Instance-side DSL methods available inside a Type { ... } block.
      module SemanticDatatypeMixin
        # Sets the underlying base datatype; must be one of DataTypes.
        def basetype(datatype)
          datatype = datatype.to_sym
          unless DataTypes.include?(datatype)
            raise Error.new("Illegal datatype (#{datatype})")
          end
          @datatype = datatype
        end
        # Fixed: the original list contained :integer twice; membership
        # semantics are unchanged.
        DataTypes = [:json,:string,:integer,:boolean]

        def parent(parent)
          @parent = parent.to_s
        end
        # Accepts either a Proc (used directly) or a Regexp (wrapped in a
        # scalar check: value must not be an Array/Hash and its to_s form must
        # match).
        def validation(validation)
          @validation_proc =
            if validation.kind_of?(Proc)
              validation
            elsif validation.kind_of?(Regexp)
              lambda do |v|
                v.respond_to?(:to_s) and
                  (not v.kind_of?(Array)) and
                  (not v.kind_of?(Hash)) and
                  v.to_s =~ validation
              end
            else
              raise Error.new("Illegal validation argument (#{validation.inspect})")
            end
        end

        # Proc converting a validated external value to its internal form.
        def internal_form(internal_form_proc)
          @internal_form_proc = internal_form_proc
        end

        # Finalizes a Type declaration; a basetype is mandatory.
        def build()
          unless @datatype
            raise Error.new("Datatype must be specified")
          end
          self
        end
      end
    end
  end
end
-
2
module DTK; class Attribute
  # Hooks for attributes whose updates require side effects beyond a plain
  # value write (node memory size, os identifier, group cardinality).
  class SpecialProcessing
    r8_nested_require('special_processing','value_check')
    r8_nested_require('special_processing','update')
    # NOTE(review): `private` has no effect on `def self.` methods; the class
    # methods below stay callable (the subclasses rely on them).
    private

    # Returns the special-processing info hash for attr, or nil when the
    # attribute needs no special handling.
    def self.needs_special_processing?(attr)
      attr_info(attr)
    end

    # Looks up per-attribute info keyed by owner type (:node / :component)
    # and the attribute's display name.
    def self.attr_info(attr)
      if type = attribute_type(attr)
        SpecialProcessingInfo[type][attr.get_field?(:display_name).to_sym]
      end
    end

    # Classifies the attribute's owner; returns :node, :component, or nil
    # (logging an error) when neither parent id is set.
    def self.attribute_type(attr)
      attr.update_object!(:node_node_id,:component_component_id)
      if attr[:node_node_id] then :node
      elsif attr[:component_component_id] then :component
      else
        Log.error("Unexepected that both :node_node_id and :component_component_id are nil")
        nil
      end
    end

    # Wraps the legal-value check for an attribute: either an explicit list of
    # legal values or a characteristic function plus an error message.
    class LegalValues
      attr_reader :print_form
      def include?(val)
        @charachteristic_fn.call(val)
      end
      # Factory: returns a LegalValues only when attr_info declares a check.
      def self.create?(attr,attr_info)
        if attr_info
          if attr_info[:legal_values] or (attr_info[:legal_value_fn] and attr_info[:legal_value_error_msg])
            new(attr,attr_info)
          end
        end
      end
      private
      def initialize(attr,attr_info)
        if attr_info[:legal_values]
          legal_values = attr_info[:legal_values].call(attr)
          @charachteristic_fn = lambda{|v|legal_values.include?(v)}
          @print_form = legal_values
        else #attr_info[:legal_value_fn] and attr_info[:legal_value_error_msg]
          @charachteristic_fn = attr_info[:legal_value_fn]
          @print_form = [attr_info[:legal_value_error_msg]]
        end
      end
    end

    # Per attribute: how to compute legal values and which update proc to run.
    SpecialProcessingInfo = {
      :node => {
        :memory_size => {
          :legal_values => lambda{|a|Node::Template.legal_memory_sizes(a.model_handle(:node))},
          :proc => lambda{|a,v|Update::MemorySize.new(a,v).process()}
        },
        :os_identifier =>{
          :legal_values => lambda{|a|Node::Template.legal_os_identifiers(a.model_handle(:node))},
          :proc => lambda{|a,v|Update::OsIdentifier.new(a,v).process()}
        },
        :cardinality =>{
          # accepts a positive integer or a string of digits
          :legal_value_fn => lambda do |v|
            val =
              if v.kind_of?(Fixnum) then v
              elsif v.kind_of?(String) and v =~ /^[0-9]+$/ then v.to_i
              end
            val and val > 0
          end,
          :legal_value_error_msg => "Value must be a positive integer",
          :proc => lambda{|a,v|Update::GroupCardinality.new(a,v).process()}
        }
      },
      :component => {
      }
    }
  end
end; end
-
2
module DTK; class Attribute
  class SpecialProcessing
    # Runs the registered special-processing proc for each attribute that
    # needs one, using the new value indexed by attribute id.
    class Update < self
      # existing_attrs: attribute objects; ndx_new_vals: {attr_id => new_value}
      def self.handle_special_processing_attributes(existing_attrs,ndx_new_vals)
        existing_attrs.each do |attr|
          if needs_special_processing?(attr)
            new_val = ndx_new_vals[attr[:id]]
            attr_info(attr)[:proc].call(attr,new_val)
          end
        end
      end
      private
      def initialize(attr,new_val)
        @attr = attr
        @new_val = new_val
      end

      # Grows or shrinks a node group to match a new cardinality value.
      class GroupCardinality < self
        def initialize(attr,new_val)
          super(attr,new_val.to_i)
        end
        def process()
          @attr.update_object!(:value_asserted,:node_node_id)
          existing_val = (@attr[:value_asserted]||0).to_i
          if @new_val == existing_val
            raise ErrorUsage.new("Value set equals existing value (#{existing_val.to_s})")
          end
          node_group = @attr.get_service_node_group(:cols => [:id,:group_id,:display_name,:datacenter_datacenter_id,:assembly_id])
          if @new_val > existing_val
            node_group.add_group_members(@new_val)
          # Fixed: was `else @new_val < existing_val`, which evaluated and
          # discarded the comparison; equality is rejected above, so runtime
          # behavior is unchanged but the intent is now explicit.
          elsif @new_val < existing_val
            node_group.delete_group_members(@new_val)
          end
        end
      end

      # Resolves an os identifier to an image id/os type and applies it to the
      # node (and, for node groups, every group member).
      class OsIdentifier < self
        def process()
          os_identifier = @new_val
          node, target = get_node_and_target()
          image_id, os_type = Node::Template.find_image_id_and_os_type(os_identifier,target)
          unless image_id
            target.update_object!(:display_name,:iaas_type,:iaas_properties)
            err_msg = "No image_id defined for os identifier (#{os_identifier}) in target #{target[:display_name]}"
            if region = target.iaas_properties.hash[:region]
              err_msg << " (region: #{region})"
            end
            raise ErrorUsage.new(err_msg)
          end
          update_node!(node,image_id,os_type)
          if node.is_node_group?()
            ServiceNodeGroup.get_node_group_members(node.id_handle()).each do |target_ref_node|
              update_node!(target_ref_node,image_id,os_type)
            end
          end
        end
        private
        def get_node_and_target()
          node = @attr.get_node(:cols => [:id,:group_id,:display_name,:type,:datacenter_datacenter_id])
          [node,node.get_target()]
        end

        # Writes the resolved image id and os type onto a single node.
        def update_node!(node,image_id,os_type)
          node.update_external_ref_field(:image_id,image_id)
          node.update(:os_type => os_type)
        end
      end

      # Applies a new memory size to the node's external ref (and, for node
      # groups, to every group member).
      class MemorySize < self
        def process()
          node = @attr.get_node(:cols => [:id,:group_id,:display_name,:type,:external_ref])
          node.update_external_ref_field(:size,@new_val)
          if node.is_node_group?()
            ServiceNodeGroup.get_node_group_members(node.id_handle()).each do |target_ref_node|
              target_ref_node.update_external_ref_field(:size,@new_val)
            end
          end
        end
      end
    end
  end
end; end
-
2
module DTK; class Attribute
  class SpecialProcessing
    # Pre-update validation for attributes that require special processing.
    class ValueCheck < self
      # returns [whether_special_processing,nil_or_value_check_error]
      def self.error_special_processing?(attr,new_val)
        error = nil
        if attr_info = needs_special_processing?(attr)
          error = error?(attr,attr_info,new_val)
        end
        special_processing = (not attr_info.nil?)
        [special_processing,error]
      end

      private
      # Returns an error object when new_val falls outside the attribute's
      # legal values; nil otherwise.
      # NOTE(review): this references LegalValue::Error, but only LegalValues
      # is defined in this file -- verify LegalValue::Error exists elsewhere.
      def self.error?(attr,attr_info,new_val)
        if legal_values = LegalValues.create?(attr,attr_info)
          unless legal_values.include?(new_val)
            LegalValue::Error.new(attr,new_val,:legal_values => legal_values.print_form)
          end
        end
      end
    end
  end
end; end
-
2
module DTK; class Attribute
  # Serialized updates of derived attribute values; all writes run inside a
  # process-wide critical section.
  class UpdateDerivedValues
    r8_nested_require('update_derived_values','delete')

    # Applies update_deltas (rows keyed by :id) and returns rows describing
    # what changed (with :old_value_derived spliced in).
    def self.update(attr_mh,update_deltas,opts={})
      ret = Array.new
      attr_ids = update_deltas.map{|r|r[:id]}
      critical_section(attr_ids) do
        # Fixed: previously called with `opts={}`, which reassigned the local
        # to an empty hash and silently dropped the caller's options.
        ret = update_in_critical_section(attr_mh,update_deltas,opts)
      end
      ret
    end

    # links_delete_info has type array of Delete::LinkInfo
    def self.update_for_delete_links(attr_mh,links_delete_info)
      ret = Array.new
      attr_ids = links_delete_info.map{|l|l.input_attribute[:id]}
      critical_section(attr_ids) do
        ret = links_delete_info.map{|link_info|Delete.update_attribute(attr_mh,link_info)}
      end
      ret
    end

    private
    # NOTE(review): `private` has no effect on `def self.` methods; these
    # remain callable from outside.
    Lock = Mutex.new
    def self.critical_section(attr_ids,&block)
      # passing in attr_ids, but not using now; may use if better to lock on per attribute basis
      Lock.synchronize{yield}
    end

    # Partitions update_deltas by row class (OutputArrayAppend /
    # OutputPartial / other) and dispatches each group to its strategy.
    def self.update_in_critical_section(attr_mh,update_deltas,opts={})
      # break up by type of row and process and aggregate
      return Array.new if update_deltas.empty?
      ndx_update_deltas = update_deltas.inject({}) do |h,r|
        index = Aux::demodulize(r.class.to_s)
        (h[index] ||= Array.new) << r
        h
      end
      ndx_update_deltas.map do |type,rows|
        update_attribute_values_aux(type,attr_mh,rows,opts)
      end.flatten
    end

    def self.update_attribute_values_aux(type,attr_mh,update_deltas,opts={})
      case type
      when "OutputArrayAppend"
        update_attribute_values_array_append(attr_mh,update_deltas,opts)
      when "OutputPartial"
        update_attribute_values_partial(attr_mh,update_deltas,opts)
      else
        update_attribute_values_simple(attr_mh,update_deltas,opts)
      end
    end

    # Whole-value replacement of :value_derived for each row.
    def self.update_attribute_values_simple(attr_mh,update_hashes,opts={})
      ret = Array.new
      id_list = update_hashes.map{|r|r[:id]}
      Model.select_process_and_update(attr_mh,[:id,:value_derived],id_list) do |existing_vals|
        ndx_existing_vals = existing_vals.inject({}){|h,r|h.merge(r[:id] => r[:value_derived])}
        update_hashes.map do |r|
          attr_id = r[:id]
          existing_val = ndx_existing_vals[attr_id]
          replacement_row = {:id => attr_id, :value_derived => r[:value_derived]}
          ret << replacement_row.merge(:source_output_id => r[:source_output_id], :old_value_derived => existing_val)
          replacement_row
        end
      end
      ret
    end

    # appends value to any array type; if the array does not exist already it creates it from fresh
    def self.update_attribute_values_array_append(attr_mh,array_slice_rows,opts={})
      ndx_ret = Hash.new
      attr_link_updates = Array.new
      id_list = array_slice_rows.map{|r|r[:id]}
      Model.select_process_and_update(attr_mh,[:id,:value_derived],id_list) do |existing_vals|
        ndx_existing_vals = existing_vals.inject(Hash.new){|h,r|h.merge(r[:id] => r[:value_derived])}
        ndx_attr_updates = array_slice_rows.inject(Hash.new) do |h,r|
          attr_id = r[:id]
          existing_val = ndx_existing_vals[attr_id]||[]
          offset = existing_val.size
          last_el = r[:array_slice].size-1
          index_map = r[:output_is_array] ?
            AttributeLink::IndexMap.generate_from_bounds(0,last_el,offset) :
            AttributeLink::IndexMap.generate_for_output_scalar(last_el,offset)
          attr_link_update = {
            :id => r[:attr_link_id],
            :index_map => index_map
          }
          attr_link_updates << attr_link_update

          # update ndx_existing_vals to handle case where multiple entries pointing to same element
          ndx_existing_vals[attr_id] = new_val = existing_val + r[:array_slice]
          replacement_row = {:id => attr_id, :value_derived => new_val}

          # if multiple entries pointing to same element then last one taken since it incorporates all of them

          # TODO: if multiple entries pointing to same element source_output_id will be the last one;
          # this may be be problematic because source_output_id may be used just for parent to use for change
          # objects; double check this
          ndx_ret.merge!(attr_id => replacement_row.merge(:source_output_id => r[:source_output_id], :old_value_derived => existing_val))
          h.merge(attr_id => replacement_row)
        end
        ndx_attr_updates.values
      end

      # update the index_maps on the links
      Model.update_from_rows(attr_mh.createMH(:attribute_link),attr_link_updates)
      ndx_ret.values
    end

    # Merges partial (index-mapped) output values into the existing derived
    # value of each attribute, persisting unresolved index maps on the links.
    def self.update_attribute_values_partial(attr_mh,partial_update_rows,opts={})
      index_map_list = partial_update_rows.map{|r|r[:index_map] unless r[:index_map_persisted]}.compact
      cmp_mh = attr_mh.createMH(:component)
      AttributeLink::IndexMap.resolve_input_paths!(index_map_list,cmp_mh)
      id_list = partial_update_rows.map{|r|r[:id]}

      ndx_ret = Hash.new
      Model.select_process_and_update(attr_mh,[:id,:value_derived],id_list) do |existing_vals|
        ndx_existing_vals = existing_vals.inject({}) do |h,r|
          h.merge(r[:id] => r[:value_derived])
        end
        partial_update_rows.each do |r|
          # TODO: more efficient if cast out elements taht did not change
          # TODO: need to validate that this works when theer are multiple nested values for same id
          attr_id = r[:id]
          existing_val = (ndx_ret[attr_id]||{})[:value_derived] || ndx_existing_vals[attr_id]
          p = ndx_ret[attr_id] ||= {
            :id => attr_id,
            :source_output_id => r[:source_output_id],
            :old_value_derived => ndx_existing_vals[attr_id]
          }
          p[:value_derived] = r[:index_map].merge_into(existing_val,r[:output_value])
        end
        # replacement rows
        ndx_ret.values.map{|r|Aux.hash_subset(r,[:id,:value_derived])}
      end

      attr_link_updates = partial_update_rows.map do |r|
        unless r[:index_map_persisted]
          {
            :id => r[:attr_link_id],
            :index_map => r[:index_map]
          }
        end
      end.compact
      unless attr_link_updates.empty?
        Model.update_from_rows(attr_mh.createMH(:attribute_link),attr_link_updates)
      end

      ndx_ret.values
    end

    def self.input_index(link_hash)
      input_output_index_aux(link_hash,:input)
    end
    def self.output_index(link_hash)
      input_output_index_aux(link_hash,:output)
    end
    # Extracts the single :input/:output entry from a link's index map; logs
    # and returns nil when the map is absent or has more than one entry.
    def self.input_output_index_aux(link_hash,dir)
      ret = nil
      unless index_map = link_hash[:index_map]
        return ret
      end
      unless index_map.size == 1
        Log.error("not treating update_for_delete_link when index_map size is not equal to 1; its value is #{index_map.inspect}")
        return ret
      end
      index_map.first && index_map.first[dir]
    end

  end
end; end
-
3
module DTK; class Attribute; class UpdateDerivedValues
  # for processing deleting of links
  class Delete < self
    # Collects, per input attribute, the links being deleted and the remaining
    # ("other") links, deduplicated by :attribute_link_id.
    class LinkInfo
      attr_reader :input_attribute,:deleted_links,:other_links
      def initialize(input_attribute)
        @input_attribute = input_attribute
        @deleted_links = Array.new
        @other_links = Array.new
      end
      def add_other_link!(link)
        @other_links << link unless match?(@other_links,link)
      end
      def add_deleted_link!(link)
        @deleted_links << link unless match?(@deleted_links,link)
      end
      private
      # truthy when a link with the same :attribute_link_id is already present
      def match?(links,link)
        attribute_link_id = link[:attribute_link_id]
        links.find{|l|l[:attribute_link_id] == attribute_link_id}
      end
    end

    # Updates an input attribute after some of its links were deleted: either
    # nulls out the derived value (all links gone) or splices the deleted
    # indexes out of the array value. Returns the replacement row including
    # :old_value_derived.
    def self.update_attribute(attr_mh,link_info)
      # determine if should null out input attribute or instead to splice out indexes from array
      indexes_to_delete = Array.new
      # test link_info.other_links.empty? is a simple way to test whether what is in deleted_links is all
      # the entries in the input attribute
      unless link_info.other_links.empty?
        indexes_to_delete = link_info.deleted_links.map{|link|input_index(link)}.select do |input_index|
          input_index and array_integer?(input_index)
        end
      end

      if indexes_to_delete.empty?
        set_to_null(attr_mh,link_info.input_attribute)
      else
        splice_out(attr_mh,indexes_to_delete,link_info)
      end
    end
    private
    # NOTE(review): `private` has no effect on `def self.` methods.
    def self.set_to_null(attr_mh,input_attribute)
      row_to_update = {
        :id =>input_attribute[:id],
        :value_derived => nil
      }
      Model.update_from_rows(attr_mh,[row_to_update])
      old_value_derived = input_attribute[:value_derived]
      row_to_update.merge(:old_value_derived => old_value_derived)
    end

    IndexPositionInfo = Struct.new(:current_pos,:new_pos,:link)

    # splice out the values in input array from the deleted links and renumber on the other links
    def self.splice_out(attr_mh,indexes_to_delete,link_info)
      ret = nil
      input_attribute = link_info.input_attribute

      # for other links to facilitate renumbering maintain a renumbering_mapping
      index_pos_info_array = link_info.other_links.map do |link|
        current_pos = array_integer(input_index(link))
        IndexPositionInfo.new(current_pos,current_pos,link)
      end

      # will be interating over delete_positions; reversing order so dont have to renumber this
      delete_positions = link_info.deleted_links.map do |link|
        array_integer(input_index(link))
      end.sort{|a,b|b <=> a}
      Model.select_process_and_update(attr_mh,[:id,:value_derived],[input_attribute[:id]]) do |rows|
        # will only be one row;
        row = rows.first
        val = row[:value_derived]
        # NOTE(review): `dup?` is not a core Ruby method; presumably a project
        # extension (nil-safe dup) -- verify it is defined.
        ret = {:id => row[:id], :old_value_derived => val.dup?}
        delete_positions.each do |pos_to_delete|
          val.delete_at(pos_to_delete)
          index_pos_info_array.each do |other_link_info|
            if other_link_info.new_pos > pos_to_delete
              other_link_info.new_pos -= 1
            end
          end
        end
        ret.merge!(:value_derived => val)
        [row] #row with changed :value_derived
      end

      renumber_links?(attr_mh,index_pos_info_array)

      ret
    end

    # Persists updated index maps for links whose input position shifted.
    def self.renumber_links?(attr_mh,index_pos_info_array)
      rows_to_update = Array.new
      index_pos_info_array.map do |index_pos_info|
        if index_pos_info.current_pos != index_pos_info.new_pos
          link = index_pos_info.link
          new_index_map = [{:output => output_index(link), :input => [index_pos_info.new_pos]}]
          rows_to_update << {:id => link[:attribute_link_id], :index_map => new_index_map}
        end
      end
      unless rows_to_update.empty?
        Model.update_from_rows(attr_mh.createMH(:attribute_link),rows_to_update)
      end
    end

    # Like array_integer? but raises instead of logging on malformed input.
    def self.array_integer(input_index)
      array_integer?(input_index,:no_error_msg => true) ||
        raise(Error.new(error_msg_link_def_index(input_index)))
    end

    # Returns n when input_index has the form [n]; otherwise nil (logging an
    # error unless opts[:no_error_msg]).
    def self.array_integer?(input_index,opts={})
      ret = nil
      if input_index.kind_of?(Array) and input_index.size == 1 and input_index.first.kind_of?(Fixnum)
        ret = input_index.first
      end
      if ret.nil? and !opts[:no_error_msg]
        Log.error(error_msg_link_def_index(input_index))
      end
      ret
    end

    def self.error_msg_link_def_index(input_index)
      "Unexpected that link def index (#{input_index.inspect}) does not have form: [n]"
    end
  end
end; end; end
-
1
module DTK
-
1
class AttributeLink < Model
-
1
r8_nested_require('attribute_link','propagate_mixins')
-
1
r8_nested_require('attribute_link','propagate_changes')
-
1
r8_nested_require('attribute_link','function')
-
1
r8_nested_require('attribute_link','propagate_processor')
-
1
r8_nested_require('attribute_link','ad_hoc')
-
-
1
extend PropagateChangesClassMixin
-
-
1
# Marker classes tagging update rows by how the output value is applied.
class Output < HashObject
end
# Output value is appended to an array-valued attribute.
class OutputArrayAppend < Output
end
# Output value updates part (an index-mapped slice) of the attribute value.
class OutputPartial < Output
end
-
-
1
# Column set shared by attribute-link queries.
def self.common_columns()
  %i[id group_id display_name input_id output_id type hidden function index_map assembly_id port_link_id]
end
-
-
# virtual attribute defs
-
1
def output_index_map()
  index_map_aux(:output)
end
def input_index_map()
  index_map_aux(:input)
end
# Returns the :input or :output side of this link's single index-map entry;
# nil when there is no index map or it has more than one entry (logged).
# NOTE(review): returns false (not nil) when the selected side is an empty
# array -- truthiness is the same, but the return type is mixed.
def index_map_aux(input_or_output)
  if index_map = get_field?(:index_map)
    unless index_map.size == 1
      Log.error("Not treating item map with size greater than 1")
      return nil
    end
    ret = index_map.first[input_or_output]
    (!ret.empty?) && ret
  end
end
private :index_map_aux
-
-
########################## get links ##################
-
1
# Fetches attribute links matching filter and augments each with its full
# input/output attribute objects under :input / :output.
def self.get_augmented(model_handle,filter)
  ret = Array.new
  sp_hash = {
    :cols => [:id,:group_id,:input_id,:output_id,:function,:index_map],
    :filter => filter
  }
  attr_links = get_objs(model_handle,sp_hash)
  return ret if attr_links.empty?

  # collect every endpoint id and fetch the attributes in one query
  attr_ids = attr_links.inject(Array.new){|array,al|array + [al[:input_id],al[:output_id]]}
  filter = [:oneof,:id,attr_ids]
  ndx_attrs = Attribute.get_augmented(model_handle.createMH(:attribute),filter).inject(Hash.new){|h,r|h.merge(r[:id] => r)}

  attr_links.map{|al|al.merge(:input => ndx_attrs[al[:input_id]], :output => ndx_attrs[al[:output_id]])}
end
-
########################## end: get links ##################
-
-
########################## add new links ##################
-
1
# Creates attribute links derived from link defs, cloning attribute mappings
# into this context when needed.
def self.create_from_link_defs__clone_if_needed(parent_idh,link_def_context,opts={})

  #TODO: might put back in on_create_events.each{|ev|ev.process!(context)}

  # ret_links__clone_if_needed returns array of type LinkDef::Link::AttributeMapping::AugmentedLinkContext
  # which has attribute_mapping plus needed context
  aug_am_links = link_def_context.aug_attr_mappings__clone_if_needed(opts)
  create_attribute_links(parent_idh,aug_am_links)
end
-
-
1
# Persists new attribute-link rows, then updates port info and propagates
# attribute values across the new links, optionally recording pending state
# changes. Mutates rows_to_create in place (parent col, :ref, new :id).
# Options: :attr_rows, :donot_update_port_info, :donot_create_pending_changes.
def self.create_attribute_links(parent_idh,rows_to_create,opts={})
  return Array.new if rows_to_create.empty?
  attr_mh = parent_idh.create_childMH(:attribute)
  attr_link_mh = parent_idh.create_childMH(:attribute_link)

  attr_info = create_attribute_links__attr_info(attr_mh,rows_to_create,opts)
  add_link_fns!(rows_to_create,attr_info)

  # add parent_col and ref
  parent_col = attr_link_mh.parent_id_field_name()
  parent_id = parent_idh.get_id()
  rows_to_create.each do |row|
    row[parent_col] ||= parent_id
    row[:ref] ||= "attribute_link:#{row[:input_id]}-#{row[:output_id]}"
  end

  # actual create of new attribute_links
  # keys registered via add_to_remove_keys are stripped before persisting
  rows_for_array_ds = rows_to_create.map{|row|Aux::hash_subset(row,row.keys - remove_keys)}
  select_ds = SQL::ArrayDataset.create(db,rows_for_array_ds,attr_link_mh,:convert_for_create => true)
  override_attrs = {}
  field_set = FieldSet.new(model_name,rows_for_array_ds.first.keys)
  returning_ids = create_from_select(attr_link_mh,field_set,select_ds,override_attrs,:returning_sql_cols=> [:id])

  # insert the new ids into rows_to_create
  # NOTE(review): relies on create_from_select returning ids in the same
  # order as rows_to_create -- confirm.
  returning_ids.each_with_index{|id_info,i|rows_to_create[i][:id] = id_info[:id]}

  # augment attributes with port info; this is needed only if port is external
  Attribute.update_port_info(attr_mh,rows_to_create) unless opts[:donot_update_port_info]

  # want to use auth_info from parent_idh in case more specific than target
  change_parent_idh = parent_idh.get_top_container_id_handle(:target,:auth_info_from_self => true)
  # propagate attribute values
  ndx_nested_change_hashes = propagate_from_create(attr_mh,attr_info,rows_to_create,change_parent_idh)
  StateChange.create_pending_change_items(ndx_nested_change_hashes.values) unless opts[:donot_create_pending_changes]
end
-
-
1
# Attribute columns needed when resolving link endpoints.
def self.attribute_info_cols()
  %i[id attribute_value semantic_type_object component_parent]
end
-
-
1
private
-
1
# Builds propagation descriptors for freshly created links (looking up the
# endpoint attributes in attr_info by id) and runs propagation.
def self.propagate_from_create(attr_mh,attr_info,attr_links,change_parent_idh)
  updates = attr_links.map do |link|
    {
      :input_attribute => attr_info[link[:input_id]],
      :output_attribute => attr_info[link[:output_id]],
      :attribute_link => link,
      :parent_idh => change_parent_idh
    }
  end
  propagate(attr_mh, updates)
end
-
-
# mechanism to compensate for fact that cols are being added by processing fns to rows_to_create that
-
# must be removed before they are saved
-
1
# Keys that processing helpers splice into rows but that must be stripped
# before the rows are persisted.
RemoveKeys = Array.new
def self.remove_keys()
  RemoveKeys
end
# Registers keys to strip before save; duplicates are ignored.
def self.add_to_remove_keys(*keys)
  keys.each { |key| RemoveKeys << key unless RemoveKeys.include?(key) }
end
-
-
1
# Fetches the attribute objects for every input/output endpoint referenced
# by rows_to_create (deduplicated), selecting attribute_info_cols().
def self.get_attribute_info(attr_mh,rows_to_create)
  endpoint_ids = rows_to_create.map{|r|[r[:input_id],r[:output_id]]}.flatten.uniq
  sp_hash = {
    :cols => attribute_info_cols(),
    :filter => [:oneof, :id, endpoint_ids]
  }
  get_objs(attr_mh,sp_hash)
end
-
-
1
# Evaluates link-def constraints for each row against the output endpoint,
# raising when an error-level violation is found.
def self.check_constraints(attr_mh,rows_to_create)
  # TODO: may modify to get all constraints from conn_info_list
  rows_to_create.each do |row|
    # TODO: right now constraints just on input, not output, attributes
    # NOTE(review): `attr` is assigned but not referenced below -- verify
    # whether create_object() has needed side effects or this is dead code.
    attr = attr_mh.createIDH(:id => row[:input_id]).create_object()
    constraints = Constraints.new()
    if row[:link_defs]
      unless row[:conn_info]
        constraints << Constraint::Macro.no_legal_endpoints(row[:link_defs])
      end
    end
    next if constraints.empty?
    target = {:target_port_id_handle => attr_mh.createIDH(:id => row[:output_id])}
    # TODO: may treat differently if rows_to_create has multiple rows
    constraints.evaluate_given_target(target, :raise_error_when_error_violation => true)
  end
end
-
-
1
# Indexes endpoint attribute rows by :id; uses opts[:attr_rows] when supplied,
# otherwise fetches the endpoints referenced by rows_to_create.
def self.create_attribute_links__attr_info(attr_mh,rows_to_create,opts={})
  attr_rows = opts[:attr_rows] || get_attribute_info(attr_mh, rows_to_create)
  attr_rows.each_with_object({}) { |attr, ndx| ndx[attr[:id]] = attr }
end
-
-
1
# Fills in each row's :function (unless already set) based on the input and
# output attribute info, overlaying optional :input_path/:output_path.
# Mutates rows_to_create in place.
def self.add_link_fns!(rows_to_create,attr_info)
  rows_to_create.each do |r|
    input_attr = attr_info[r[:input_id]].merge(r[:input_path] ? {:input_path => r[:input_path]} : {})
    output_attr = attr_info[r[:output_id]].merge(r[:output_path] ? {:output_path => r[:output_path]} : {})
    r[:function] ||= Function.link_function(r,input_attr,output_attr)
  end
end
-
-
1
add_to_remove_keys :input_path,:output_path
-
-
####################
-
1
public
-
### special purpose create links ###
-
1
# Bulk-creates "member" attribute links joining each node-group member
# component's attributes to the matching (same :ref) group attributes,
# entirely via a server-side insert-from-select.
def self.create_links_node_group_members(node_group_id_handle,ng_cmp_id_handle,node_cmp_id_handles)
  node_cmp_mh = node_cmp_id_handles.first.createMH
  node_cmp_wc = {:ancestor_id => ng_cmp_id_handle.get_id()}
  node_cmp_fs = FieldSet.opt([:id],:component)
  node_cmp_ds = get_objects_just_dataset(node_cmp_mh,node_cmp_wc,node_cmp_fs)

  attr_mh = node_cmp_mh.create_childMH(:attribute)

  # datasets for member-node attributes and the group's own attributes
  attr_parent_col = attr_mh.parent_id_field_name()
  node_attr_fs = FieldSet.opt([attr_parent_col,:id,:ref],:attribute)
  node_attr_ds = get_objects_just_dataset(attr_mh,nil,node_attr_fs)

  group_attr_wc = {attr_parent_col => ng_cmp_id_handle.get_id()}
  group_attr_fs = FieldSet.opt([:id,:ref],:attribute)
  group_attr_ds = get_objects_just_dataset(attr_mh,group_attr_wc,group_attr_fs)

  # attribute link has same parent as node_group
  attr_link_mh = node_group_id_handle.create_peerMH(:attribute_link)
  attr_link_parent_id_handle = node_group_id_handle.get_parent_id_handle()
  attr_link_parent_col = attr_link_mh.parent_id_field_name()
  ref_prefix = "attribute_link:"
  # synthesize the link columns: ref is "attribute_link:<input>-<output>",
  # type is fixed to "member" and function to "eq"
  i1_ds = node_cmp_ds.select(
    {SQL::ColRef.concat(ref_prefix,:input__id.cast(:text),"-",:output__id.cast(:text)) => :ref},
    {attr_link_parent_id_handle.get_id() => attr_link_parent_col},
    {:input__id => :input_id},
    {:output__id => :output_id},
    {"member" => :type},
    {"eq" => :function})
  first_join_ds = i1_ds.join_table(:inner,node_attr_ds,{attr_parent_col => :id},{:table_alias => :input})
  attr_link_ds = first_join_ds.join_table(:inner,group_attr_ds,[:ref],{:table_alias => :output})

  attr_link_fs = FieldSet.new(:attribute,[:ref,attr_link_parent_col,:input_id,:output_id,:function,:type])
  override_attrs = {}

  opts = {:duplicate_refs => :no_check,:returning_sql_cols => [:input_id,:output_id]}
  create_from_select(attr_link_mh,attr_link_fs,attr_link_ds,override_attrs,opts)
end
-
-
-
1
def self.create_links_sap(link_info,sap_attr_idh,sap_config_attr_idh,par_idh,node_idh)
-
attr_link_mh = sap_attr_idh.createMH(:model_name => :attribute_link, :parent_model_name => :node)
-
sap_id,sap_config_id,par_id,node_id = [sap_attr_idh,sap_config_attr_idh,par_idh,node_idh].map{|x|x.get_id()}
-
-
sap_config_name = link_info[:sap_config]
-
sap_name = link_info[:sap]
-
parent_attr_name = link_info[:parent_attr_name]
-
-
new_link_rows =
-
[
-
{
-
:ref => "#{sap_config_name}:#{sap_config_id.to_s}-#{sap_id}",
-
:display_name => "link:#{sap_config_name}-#{sap_name}",
-
:input_id => sap_id,
-
:output_id => sap_config_id,
-
:type => "internal",
-
:hidden => true,
-
:function => link_info[:sap_config_fn_name],
-
:node_node_id => node_id
-
},
-
{
-
:ref => "#{parent_attr_name}:#{par_id.to_s}-#{sap_id}",
-
:display_name => "link:#{parent_attr_name}-#{sap_name}",
-
:input_id => sap_id,
-
:output_id => par_id,
-
:type => "internal",
-
:hidden => true,
-
:function => link_info[:parent_fn_name],
-
:node_node_id => node_id
-
}
-
]
-
create_from_rows(attr_link_mh,new_link_rows)
-
end
-
-
# TODO: deprecate below after subsuming from above
-
1
def self.create_links_l4_sap(new_sap_attr_idh,sap_config_attr_idh,ipv4_host_addrs_idh,node_idh)
-
attr_link_mh = node_idh.createMH(:model_name => :attribute_link, :parent_model_name => :node)
-
new_sap_id,sap_config_id,ipv4_id,node_id = [new_sap_attr_idh,sap_config_attr_idh,ipv4_host_addrs_idh,node_idh].map{|x|x.get_id()}
-
-
new_link_rows =
-
[
-
{
-
:ref => "sap_config:#{sap_config_id.to_s}-#{new_sap_id}",
-
:display_name => "link:sap_config-sap",
-
:input_id => new_sap_id,
-
:output_id => sap_config_id,
-
:type => "internal",
-
:hidden => true,
-
:function => "sap_config__l4",
-
:node_node_id => node_id
-
},
-
{
-
:ref => "host_address:#{ipv4_id.to_s}-#{new_sap_id}",
-
:display_name => "link:host_address-sap",
-
:input_id => new_sap_id,
-
:output_id => ipv4_id,
-
:type => "internal",
-
:hidden => true,
-
:function => "host_address_ipv4",
-
:node_node_id => node_id
-
}
-
]
-
create_from_rows(attr_link_mh,new_link_rows)
-
end
-
-
########################## end add new links ##################
-
1
class IndexMap < Array
-
1
def merge_into(source,output_var)
-
self.inject(source) do |ret,el|
-
delta = el[:output].take_slice(output_var)
-
el[:input].merge_into(ret,delta)
-
end
-
end
-
-
1
def self.convert_if_needed(x)
-
x.kind_of?(Array) ? create_from_array(x) : x
-
end
-
-
1
def self.generate_from_paths(input_path,output_path)
-
create_from_array([{:input => input_path, :output => output_path}])
-
end
-
-
1
def self.generate_from_bounds(lower_bound,upper_bound,offset)
-
create_from_array((lower_bound..upper_bound).map{|i|{:output => [i], :input => [i+offset]}})
-
end
-
# TODO: may be able to be simplified because it may only be called with upper_bound == 0
-
1
def self.generate_for_output_scalar(upper_bound,offset)
-
create_from_array((0..upper_bound).map{|i|{:output => [], :input => [i+offset]}})
-
end
-
-
1
def input_array_indexes()
-
ret = Array.new
-
self.map do |el|
-
raise Error.new("unexpected form in input_array_indexes") unless el[:input].is_singleton_array?()
-
el[:input].first
-
end
-
end
-
-
1
def self.resolve_input_paths!(index_map_list,component_mh)
-
return if index_map_list.empty?
-
paths = Array.new
-
index_map_list.each{|im|im.each{|im_el|paths << im_el[:input]}}
-
IndexMapPath.resolve_paths!(paths,component_mh)
-
end
-
-
1
private
-
1
def self.create_from_array(a)
-
return nil unless a
-
ret = new()
-
a.each do |el|
-
input = el[:input].kind_of?(IndexMapPath) ? el[:input] : IndexMapPath.create_from_array(el[:input])
-
output = el[:output].kind_of?(IndexMapPath) ? el[:output] : IndexMapPath.create_from_array(el[:output])
-
ret << {:input => input, :output => output}
-
end
-
ret
-
end
-
end
-
-
1
class IndexMapPath < Array
-
1
def is_singleton_array?()
-
self.size == 1 and is_array_el?(self.first)
-
end
-
1
def take_slice(source)
-
return source if self.empty?
-
return nil if source.nil?
-
el = self.first
-
if is_array_el?(el)
-
if source.kind_of?(Array)
-
rest().take_slice(source[el])
-
else
-
Log.error("array expected")
-
nil
-
end
-
else
-
if source.kind_of?(Hash)
-
rest().take_slice(source[el.to_s])
-
else
-
Log.error("hash expected")
-
nil
-
end
-
end
-
end
-
-
1
def merge_into(source,delta)
-
return delta if self.empty?
-
el = self.first
-
if is_array_el?(el)
-
if source.kind_of?(Array) or source.nil?()
-
ret = source ? source.dup : []
-
if ret.size <= el
-
ret += (0.. el - ret.size).map{nil}
-
end
-
ret[el] = rest().merge_into(ret[el],delta)
-
ret
-
else
-
Log.error("array expected")
-
nil
-
end
-
else
-
if source.kind_of?(Hash) or source.nil?()
-
ret = source || {}
-
ret.merge(el.to_s => rest().merge_into(ret[el.to_s],delta))
-
else
-
Log.error("hash expected")
-
nil
-
end
-
end
-
end
-
-
# TODO: more efficient and not needed if can be resolved when get index
-
1
def self.resolve_paths!(path_list,component_mh)
-
ndx_cmp_idhs = Hash.new
-
path_list.each do |index_map_path|
-
index_map_path.each_with_index do |el,i|
-
next unless el.kind_of?(Hash)
-
next unless id = (el[:create_component_index]||{})[:component_id]
-
ndx_cmp_idhs[id] ||= {:idh => component_mh.createIDH(:id => id), :elements => Array.new}
-
ndx_cmp_idhs[id][:elements] << {:path => index_map_path, :i => i}
-
end
-
end
-
return if ndx_cmp_idhs.empty?
-
cmp_idhs = ndx_cmp_idhs.values.map{|x|x[:idh]}
-
sp_hash = {:cols => [:id,:multiple_instance_ref]}
-
opts = {:keep_ref_cols => true}
-
cmp_info = Model.get_objects_in_set_from_sp_hash(cmp_idhs,sp_hash,opts)
-
cmp_info.each do |r|
-
ref = r[:multiple_instance_ref]
-
ndx_cmp_idhs[r[:id]][:elements].each do |el|
-
el[:path][el[:i]] = ref
-
end
-
end
-
end
-
1
private
-
1
def self.create_from_array(a)
-
ret = new()
-
return ret unless a
-
a.each do |el|
-
if el.kind_of?(String) and el =~ /^[0-9]+$/
-
el = el.to_i
-
end
-
ret << el
-
end
-
ret
-
end
-
-
1
def rest()
-
self[1..self.size-1]
-
end
-
1
def is_array_el?(el)
-
el.kind_of?(Fixnum)
-
end
-
end
-
-
######################## TODO: see which of below is still used
-
1
def self.get_legal_connections(parent_id_handle)
-
c = parent_id_handle[:c]
-
parent_id = IDInfoTable.get_id_from_id_handle(parent_id_handle)
-
component_ds = get_objects_just_dataset(ModelHandle.new(c,:component),nil,{:parent_id => parent_id}.merge(FieldSet.opt([:id,:external_ref],:component)))
-
attribute_ds = get_objects_just_dataset(ModelHandle.new(c,:attribute),nil,FieldSet.opt([:id,:external_ref,:component_component_id],:attribute))
-
-
attribute_link_ds = get_objects_just_dataset(ModelHandle.new(c,:attribute_link))
-
component_ds.graph(:inner,attribute_ds,{:component_component_id => :id}).graph(:left_outer,attribute_link_ds,{:input_id => :id}).where({:attribute_link__id => nil}).all
-
end
-
-
1
def self.get_legal_connections_wrt_endpoint(attribute_id_handle,parent_id_handle)
-
end
-
-
1
private
-
1
def self.ret_function_if_can_determine(input_obj,output_obj)
-
i_sem = input_obj[:semantic_type]
-
return nil if i_sem.nil?
-
o_sem = output_obj[:semantic_type]
-
return nil if o_sem.nil?
-
-
# TBD: haven't put in any rules if they have different semantic types
-
return nil unless i_sem.keys.first == o_sem.keys.first
-
-
sem_type = i_sem.keys.first
-
ret_function_endpoints_same_type(i_sem[sem_type],o_sem[sem_type])
-
end
-
-
1
def self.ret_function_endpoints_same_type(i,o)
-
# TBD: more robust is allowing for example output to be "database", which matches with "postgresql" and also to have version info, etc
-
raise Error.new("mismatched input and output types") unless i[:type] == o[:type]
-
return :equal if !i[:is_array] and !o[:is_array]
-
return :equal if i[:is_array] and o[:is_array]
-
return :concat if !i[:is_array] and o[:is_array]
-
raise Error.new("mismatched input and output types") if i[:is_array] and !o[:is_array]
-
nil
-
end
-
-
1
def get_input_attribute(opts={})
-
return nil if self[:input_id].nil?
-
get_object_from_db_id(self[:input_id],:attribute)
-
end
-
-
1
def get_output_attribute(opts={})
-
return nil if self[:output_id].nil?
-
get_object_from_db_id(self[:output_id],:attribute)
-
end
-
end
-
end
-
-
-
##### Actions
-
=begin TODO: needs fixing up or removal
-
def create(target_id_handle,input_id_handle,output_id_handle,href_prefix,opts={})
-
raise Error.new("Target location (#{target_id_handle}) does not exist") unless exists? target_id_handle
-
-
input_obj = Object.get_object(input_id_handle)
-
raise Error.new("Input endpoint does not exist") if input_obj.nil?
-
i_ref = input_obj.get_qualified_ref
-
-
output_obj = Object.get_object(output_id_handle)
-
raise Error.new("Output endpoint does not exist") if output_obj.nil?
-
o_ref = output_obj.get_qualified_ref
-
-
link_content = {:input_id => input_obj[:id],:output_id => output_obj[:id]}
-
link_ref = (i_ref.to_s + "_" + o_ref.to_s).to_sym
-
-
factory_id_handle = get_factory_id_handle(target_id_handle,:attribute_link)
-
link_ids = create_from_hash(factory_id_handle,{link_ref => link_content})
-
fn = ret_function_if_can_determine(input_obj,output_obj)
-
output_obj.check_and_set_derived_rel_from_link_fn!(fn)
-
link_ids
-
end
-
=end
-
# returns function if can determine from semantic type of input and output
-
# throws an error if finds a mismatch
-
1
module DTK
-
1
class AttributeLink
-
1
class AdHoc < Hash
-
# Logic: if updating meta, then the meta is updated as well as ad_hoc updates for existing component instances
-
1
def self.create_adhoc_links(assembly,target_attr_term,source_attr_term,opts={})
-
parsed_info = Attribute::Pattern::Assembly::Link.parsed_adhoc_link_info(self,assembly,target_attr_term,source_attr_term)
-
unless opts[:update_meta] and parsed_info.meta_update_supported?()
-
return create_ad_hoc_attribute_links?(assembly,parsed_info.links)
-
end
-
-
dep_cmp = parsed_info.dep_component_instance
-
peer_cmps = assembly.get_peer_component_instances(dep_cmp)
-
# get_peer_component_instances must be done before AssemblyModule::Component::AdHocLink, which modifies parents
-
result = AssemblyModule::Component::AdHocLink.update(assembly,parsed_info)
-
if link_def_info = result[:link_def_created]
-
link_def_hash = link_def_info[:hash_form]
-
antec_cmp = parsed_info.antec_component_instance
-
create_link_defs_and_service_links(assembly,parsed_info.links,dep_cmp,peer_cmps,antec_cmp,link_def_hash)
-
else
-
create_attribute_links?(assembly,parsed_info.links,dep_cmp,peer_cmps)
-
end
-
end
-
-
# type should be :source or :target
-
1
def attribute_pattern(type)
-
@attr_pattern[type]
-
end
-
-
1
def all_dep_component_instance_hashes(assembly,dep_component,peer_cmps)
-
ret = [self]
-
# get peer component instances
-
return ret if peer_cmps.empty?
-
-
# find whether target or source side matches with dep_component
-
dep_side,antec_side,dep_attr_field,antec_attr_field =
-
if attribute_pattern(:target).component_instance.id() == dep_component.id()
-
[:target,:source,:input_id,:output_id]
-
else
-
[:source,:target,:output_id,:input_id]
-
end
-
-
# find the matching attributes on the peer components
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name],
-
:filter => [:and,[:oneof,:component_component_id,peer_cmps.map{|cmp|cmp.id()}],
-
[:eq,:display_name,attribute_pattern(dep_side).attribute_name]]
-
}
-
assembly_id = assembly.id()
-
antec_attr_id = attribute_pattern(antec_side).attribute_id()
-
peer_attrs = Model.get_objs(assembly.model_handle(:attribute),sp_hash).map do |dep_attr|
-
{
-
dep_attr_field => dep_attr.id(),
-
antec_attr_field => antec_attr_id,
-
:assembly_id => assembly_id
-
}
-
end
-
-
ret + peer_attrs
-
end
-
-
1
private
-
1
def initialize(hash,target_attr_pattern,source_attr_pattern)
-
super()
-
replace(hash)
-
@attr_pattern = {
-
:target => target_attr_pattern,
-
:source => source_attr_pattern.attribute_pattern
-
}
-
end
-
-
1
def self.create_link_defs_and_service_links(assembly,parsed_adhoc_links,dep_cmp,peer_cmps,antec_cmp,link_def_hash)
-
# This method iterates over all the components in assembly that includes dep_cmp and its peers and for each
-
# adds the link_def to it and then service link between this and antec_cmp
-
dependency_name = link_def_hash.values.first[:link_type]
-
antec_cmp_idh = antec_cmp.id_handle()
-
([dep_cmp] + peer_cmps).each do |cmp|
-
# TODO: can be more efficient to combine these two operations and see if can bulk them
-
cmp_idh = cmp.id_handle()
-
Model.input_hash_content_into_model(cmp_idh,:link_def => link_def_hash)
-
assembly.add_service_link?(cmp_idh,antec_cmp_idh,:dependency_name => dependency_name)
-
end
-
end
-
-
1
def self.create_attribute_links?(assembly,parsed_adhoc_links,dep_component,peer_components)
-
attr_link_rows = parsed_adhoc_links.inject(Array.new) do |a,adhoc_link|
-
a + adhoc_link.all_dep_component_instance_hashes(assembly,dep_component,peer_components)
-
end
-
create_ad_hoc_attribute_links?(assembly,attr_link_rows)
-
end
-
-
1
def self.create_ad_hoc_attribute_links?(assembly,attr_link_rows)
-
ret = Array.new
-
existing_links = get_matching_ad_hoc_attribute_links(assembly,attr_link_rows)
-
new_links = attr_link_rows.reject do |link|
-
existing_links.find do |existing_link|
-
existing_link[:output_id] == link[:output_id] and
-
existing_link[:input_id] == link[:input_id]
-
end
-
end
-
return ret if new_links.empty?
-
opts_create = {
-
:donot_update_port_info => true,
-
:donot_create_pending_changes => true
-
}
-
AttributeLink.create_attribute_links(assembly.id_handle(),new_links,opts_create)
-
end
-
-
1
def self.get_matching_ad_hoc_attribute_links(assembly,attr_link_rows)
-
ret = Array.new
-
return ret if attr_link_rows.empty?
-
assembly_id = assembly.id()
-
disjunct_array = attr_link_rows.map do |r|
-
[:and,[:eq,:assembly_id,assembly_id],
-
[:eq,:output_id,r[:output_id]],
-
[:eq,:input_id,r[:input_id]]]
-
end
-
sp_hash = {
-
:cols => [:id,:group_id,:assembly_id,:input_id,:output_id],
-
:filter => [:or] + disjunct_array
-
}
-
Model.get_objs(assembly.model_handle(:attribute_link),sp_hash)
-
end
-
-
end
-
end
-
end
-
-
2
module DTK; class AttributeLink
-
1
class Function
-
# base must go before its children
-
1
r8_nested_require('function','base')
-
1
r8_nested_require('function','eq')
-
1
r8_nested_require('function','eq_indexed')
-
1
r8_nested_require('function','array_append')
-
-
# with_args must go before its children
-
1
r8_nested_require('function','with_args')
-
1
r8_nested_require('function','composite')
-
1
r8_nested_require('function','var_embedded_in_text')
-
-
1
include Propagate::Mixin
-
1
def initialize(function_def,propagate_proc)
-
# TODO: when we get rid of legacy fn processing we can get rid of needing to copy all these vars
-
@propagate_proc = propagate_proc
-
@output_attr = propagate_proc.output_attr
-
@index_map = propagate_proc.index_map
-
@attr_link_id = propagate_proc.attr_link_id
-
@input_attr = propagate_proc.input_attr
-
@output_attr = propagate_proc.output_attr
-
@input_path = propagate_proc.input_path
-
@output_path = propagate_proc.output_path
-
end
-
-
1
def self.link_function(link_info,input_attr,output_attr)
-
ret = base_fn = Base.base_link_function(input_attr,output_attr)
-
if link_info.respond_to?(:parse_function_with_args?)
-
if parse_info = link_info.parse_function_with_args?()
-
ret = WithArgs.with_args_link_function(base_fn,parse_info)
-
end
-
end
-
ret
-
end
-
-
1
def internal_hash_form(opts={})
-
raise Error.new("Should not be called")
-
end
-
-
1
def value(opts={})
-
raise Error.new("Should not be called")
-
end
-
-
1
private
-
1
def self.internal_hash_form?(function_def,propagate_proc)
-
fn_name = function_name(function_def)
-
fn_klass = function_class_names().find{|k|k.name() == fn_name}
-
fn_klass && fn_klass.new(function_def,propagate_proc).internal_hash_form()
-
end
-
-
1
def self.function_class_names()
-
@function_class_names = [Eq,EqIndexed,ArrayAppend,Composite,VarEmbeddedInText]
-
end
-
-
1
def self.klass(name)
-
begin
-
const_get(Aux.camelize(name))
-
rescue
-
raise Error.new("Illegal function name (#{name}")
-
end
-
end
-
-
1
def self.name()
-
Aux.underscore(self.to_s).split('/').last.to_sym
-
end
-
-
1
def self.function_name(function_def)
-
Base.function_name?(function_def) || WithArgs.function_name?(function_def) ||
-
raise(Error.new("Function def has illegal form: #{function_def.inspect}"))
-
end
-
-
end
-
end; end
-
-
2
module DTK; class AttributeLink
-
1
class Function
-
1
class ArrayAppend < Base
-
# called when input is an array and each link into it appends the value in
-
1
def internal_hash_form(opts={})
-
output_value = output_value(opts)
-
if @index_map.nil? and (@input_path.nil? or @input_path.empty?)
-
new_rows = output_value.nil? ? [nil] : (output_semantic_type().is_array? ? output_value : [output_value])
-
output_is_array = @output_attr[:semantic_type_object].is_array?()
-
OutputArrayAppend.new(:array_slice => new_rows, :attr_link_id => @attr_link_id, :output_is_array => output_is_array)
-
else
-
index_map_persisted = @index_map ? true : false
-
index_map = @index_map || AttributeLink::IndexMap.generate_from_paths(@input_path,nil)
-
OutputPartial.new(:attr_link_id => @attr_link_id, :output_value => output_value, :index_map => index_map, :index_map_persisted => index_map_persisted)
-
end
-
end
-
-
end
-
end
-
end; end
-
2
module DTK; class AttributeLink
-
1
class Function
-
1
class Base < self
-
1
def self.function_name?(function_def)
-
function_def.kind_of?(String) && function_def.to_sym
-
end
-
-
1
def self.base_link_function(input_attr,output_attr)
-
input_type = attribute_index_type__input(input_attr)
-
output_type = attribute_index_type__output(output_attr)
-
LinkFunctionMatrix[output_type][input_type]
-
end
-
# first index is output type, second one is input type
-
# TODO: DTK-2062; think we want to get rid of select_one and have an 'error' one that leads to
-
# violation in service instance
-
1
LinkFunctionMatrix = {
-
:scalar => {
-
:scalar => "eq", :indexed => "eq_indexed", :array => "array_append"
-
},
-
:indexed => {
-
:scalar => "eq_indexed", :indexed => "eq_indexed", :array => "array_append"
-
},
-
:array => {
-
:scalar => "select_one", :indexed => "select_one", :array => "eq"
-
}
-
}
-
-
1
private
-
1
def self.attribute_index_type__input(attr)
-
# TODO: think may need to look at data type inside array
-
if attr[:input_path] then :indexed
-
else attr[:semantic_type_object].is_array?() ? :array : :scalar
-
end
-
end
-
-
1
def self.attribute_index_type__output(attr)
-
# TODO: may need to look at data type inside array
-
if attr[:output_path] then :indexed
-
else attr[:semantic_type_object].is_array?() ? :array : :scalar
-
end
-
end
-
end
-
-
end
-
end; end
-
-
2
module DTK; class AttributeLink
-
1
class Function
-
1
class Composite < WithArgs
-
1
def initialize(function_def,propagate_proc)
-
super
-
# need to reify constants
-
reify_constant!(:outer_function,propagate_proc)
-
reify_constant!(:inner_expression,propagate_proc)
-
end
-
-
1
def self.composite_link_function(outer_function,inner_expression)
-
{
-
:function => {
-
:name => :composite,
-
:constants => {
-
:outer_function => outer_function,
-
:inner_expression => inner_expression
-
}
-
}
-
}
-
end
-
-
1
def internal_hash_form(opts={})
-
unless opts.empty?
-
raise Error.new("Opts should be empty")
-
end
-
inner_value = inner_expression.value()
-
outer_function.internal_hash_form(:inner_value => inner_value)
-
end
-
-
1
def value(opts={})
-
inner_value = inner_expression.value()
-
outer_function.value(:inner_value => inner_value)
-
end
-
1
private
-
1
def reify_constant!(constant_name,propagate_proc)
-
nested_function_def = constants[constant_name]
-
nested_fn_name = self.class.function_name(nested_function_def)
-
nested_klass = self.class.klass(nested_fn_name)
-
constants[constant_name] = nested_klass.new(nested_function_def,propagate_proc)
-
end
-
-
1
def inner_expression()
-
constants[:inner_expression]
-
end
-
1
def outer_function()
-
constants[:outer_function]
-
end
-
end
-
end
-
end; end
-
2
module DTK; class AttributeLink
-
1
class Function
-
1
class Eq < Base
-
1
def internal_hash_form(opts={})
-
Output.new(:value_derived => output_value(opts))
-
end
-
end
-
end
-
end; end
-
2
module DTK; class AttributeLink
-
1
class Function
-
1
class EqIndexed < Base
-
# called when it is an equality setting between indexed values on input and output side.
-
# Can be the null index on one of the sides meaning to take whole value
-
# TODO: can simplify because only will be called when input is not an array
-
1
def internal_hash_form(opts={})
-
output_value = output_value(opts)
-
if @index_map.nil? and (@input_path.nil? or @input_path.empty?) and (@output_path.nil? or @output_path.empty?)
-
new_rows = output_value.nil? ? [nil] : (output_semantic_type().is_array? ? output_value : [output_value])
-
OutputArrayAppend.new(:array_slice => new_rows,
-
:attr_link_id => @attr_link_id)
-
else
-
index_map_persisted = @index_map ? true : false
-
index_map = @index_map || IndexMap.generate_from_paths(@input_path,@output_path)
-
OutputPartial.new(:attr_link_id => @attr_link_id,
-
:output_value => output_value,
-
:index_map => index_map,
-
:index_map_persisted => index_map_persisted)
-
end
-
end
-
-
end
-
end
-
end; end
-
2
module DTK; class AttributeLink
-
1
class Function
-
1
class VarEmbeddedInText < WithArgs
-
1
def value(opts={})
-
val = nil
-
var = output_value(opts)
-
# alternative semantics is to treat nil like a var with an empty string
-
return val if var.nil?
-
text_parts = constants[:text_parts].dup
-
val = text_parts.shift
-
text_parts.each do |text_part|
-
val << var
-
val << text_part
-
end
-
val
-
end
-
end
-
end
-
end; end
-
2
module DTK; class AttributeLink
-
1
class Function
-
1
class WithArgs < self
-
1
r8_nested_require('with_args','function_info')
-
-
1
def initialize(function_def,propagate_proc)
-
super
-
@function_info = FunctionInfo.create(function_def)
-
end
-
-
1
def self.with_args_link_function(base_fn,parse_info)
-
outer_function = base_fn
-
inner_expression = {
-
:function => {
-
:name => parse_info[:name],
-
:constants => parse_info[:constants]
-
}
-
}
-
Composite.composite_link_function(outer_function,inner_expression)
-
end
-
-
1
def self.function_name?(function_def)
-
if function_info = FunctionInfo.create?(function_def)
-
function_info.name
-
end
-
end
-
-
1
private
-
1
def constants()
-
@function_info.constants()
-
end
-
-
end
-
end
-
end; end
-
-
2
module DTK; class AttributeLink
-
1
class Function::WithArgs
-
1
class FunctionInfo
-
1
attr_reader :name,:constants
-
1
def initialize(name,constants_hash)
-
@name = name.to_sym
-
@constants = Constants.new(constants_hash)
-
end
-
-
1
def self.create(function_def)
-
unless ret = create?(function_def)
-
raise Error.new("Error creating (#{function_def.inspect})")
-
end
-
ret
-
end
-
1
def self.create?(function_def)
-
if function_def.kind_of?(Hash) and function_def.has_key?(:function)
-
fn_info_hash = function_def[:function]
-
unless fn_info_hash and fn_info_hash.has_key?(:name)
-
raise(Error.new("Function def has illegal form: #{function_def.inspect}"))
-
end
-
new(fn_info_hash[:name],fn_info_hash[:constants]||{})
-
end
-
end
-
-
1
class Constants < Hash
-
1
def initialize(hash)
-
super()
-
replace(hash)
-
end
-
1
def [](k)
-
unless has_key?(k)
-
raise Error.new("New constant (#{k}) found")
-
end
-
super
-
end
-
end
-
end
-
end
-
end; end
-
-
2
module DTK; class AttributeLink
-
1
module PropagateChangesClassMixin
-
# hash top level with :input_attribute,:output_attribute,:attribute_link, :parent_idh (optional)
-
# with **_attribute having :id,:value_asserted,:value_derived,:semantic_type
-
# :attribute_link having :function, :input_id, :output_id, :index_map
-
1
def propagate(attr_mh,attrs_links_to_update)
-
ret = Hash.new
-
# compute update deltas
-
update_deltas = compute_update_deltas(attrs_links_to_update)
-
-
# make actual changes
-
opts = {:update_only_if_change => [:value_derived],:returning_cols => [:id]}
-
-
changed_input_attrs = Attribute::UpdateDerivedValues.update(attr_mh,update_deltas,opts)
-
-
# if no changes exit, otherwise recursively call propagate
-
return ret if changed_input_attrs.empty?
-
-
# input attr parents are set to associated output attrs parent
-
output_id__parent_idhs = attrs_links_to_update.inject({}) do |h,r|
-
h.merge(r[:output_attribute][:id] => r[:parent_idh])
-
end
-
-
# compute direct changes and input for nested propagation
-
# TODO: may unify with Attribute.create_change_hashes
-
ndx_direct_change_hashes = changed_input_attrs.inject({}) do |h,r|
-
id = r[:id]
-
change = {
-
:new_item => attr_mh.createIDH(:id => id),
-
:change => {:old => r[:old_value_derived], :new => r[:value_derived]}
-
}
-
if parent_idh = output_id__parent_idhs[r[:source_output_id]]
-
change.merge!(:parent => parent_idh)
-
end
-
h.merge(id => change)
-
end
-
-
# nested (recursive) propagation call
-
ndx_propagated_changes = Attribute.propagate_changes(ndx_direct_change_hashes.values)
-
# return all changes
-
ndx_direct_change_hashes.merge(ndx_propagated_changes)
-
end
-
1
private
-
1
def compute_update_deltas(attrs_links_to_update)
-
attrs_links_to_update.map do |r|
-
input_attr = r[:input_attribute]
-
output_attr = r[:output_attribute]
-
propagate_proc = PropagateProcessor.new(r[:attribute_link],input_attr,output_attr)
-
propagate_proc.propagate().merge(:id => input_attr[:id], :source_output_id => output_attr[:id])
-
end
-
end
-
end
-
end; end
-
2
module DTK; class AttributeLink
-
1
module Propagate
-
1
module Mixin
-
1
def input_value()
-
@input_attr[:value_derived]
-
end
-
-
1
def input_semantic_type()
-
SemanticType.create_from_attribute(@input_attr)
-
end
-
-
1
def output_value(opts={})
-
if opts.has_key?(:inner_value)
-
opts[:inner_value]
-
else
-
@output_attr[:value_asserted] || @output_attr[:value_derived]
-
end
-
end
-
-
1
def output_semantic_type()
-
SemanticType.create_from_attribute(@output_attr)
-
end
-
end
-
end
-
end; end
-
-
# TODO: deprecated most of this and moving to DTK::AttributeLink::Function
-
2
module DTK; class AttributeLink
-
1
class PropagateProcessor
-
1
include Propagate::Mixin
-
1
attr_reader :index_map,:attr_link_id,:input_attr,:output_attr,:input_path,:output_path
-
1
def initialize(attr_link,input_attr,output_attr)
-
@function = attr_link[:function]
-
@index_map = AttributeLink::IndexMap.convert_if_needed(attr_link[:index_map])
-
@attr_link_id = attr_link[:id]
-
@input_attr = input_attr
-
@output_attr = output_attr
-
@input_path = attr_link[:input_path]
-
@output_path = attr_link[:output_path]
-
end
-
-
# propagate from output var to input var
-
1
def propagate()
-
hash_ret = Function.internal_hash_form?(function,self)
-
-
# TODO: this returns nil if it is not (yet) processed by Function, meaning it's legacy or illegal
-
unless hash_ret ||= legacy_internal_hash_form?()
-
raise Error::NotImplemented.new("propagate value not implemented yet for fn #{function}")
-
end
-
-
hash_ret.kind_of?(Output) ? hash_ret : Output.new(hash_ret)
-
end
-
-
1
private
-
1
def legacy_internal_hash_form?()
-
if function.kind_of?(String)
-
case function
-
when "select_one"
-
propagate_when_select_one()
-
when "sap_config__l4"
-
propagate_when_sap_config__l4()
-
when "host_address_ipv4"
-
propagate_when_host_address_ipv4()
-
when "sap_conn__l4__db"
-
propagate_when_sap_conn__l4__db()
-
when "sap_config_conn__db"
-
propagate_when_sap_config_conn__db()
-
end
-
end
-
end
-
-
-
-
# TODO: need to simplify so we don't need all these one-offs
-
####### function-specific propagation
-
# TODO: refactor to use ret_cartesian_product()
-
1
def propagate_when_sap_config__l4()
-
output_v =
-
if output_semantic_type().is_array?
-
raise Error::NotImplemented.new("propagate_when_sap_config__l4 when output has empty list") if output_value.empty?
-
output_value
-
else
-
[output_value]
-
end
-
-
value = nil
-
if input_semantic_type().is_array?
-
# cartesian product with host_address
-
# TODO: may simplify and use flatten form
-
value = Array.new
-
output_v.each do |sap_config|
-
# TODO: equivalent changes may be needed on other cartesian products: removing this for below value += input_value.map{|input_item|sap_config.merge("host_address" => input_item["host_address"])}
-
value += input_value.map{|iv|iv["host_address"]}.uniq.map{|addr|sap_config.merge("host_address" => addr)}
-
end
-
else #not input_semantic_type().is_array?
-
raise Error.new("propagate_when_sap_config__l4 does not support input scalar and output array with size > 1") if output_value.size > 1
-
value = output_v.first.merge("host_address" => input_value["host_address"])
-
end
-
{:value_derived => value}
-
end
-
-
# TODO: refactor to use ret_cartesian_product()
-
1
def propagate_when_host_address_ipv4()
-
output_v =
-
if output_semantic_type().is_array?
-
raise Error::NotImplemented.new("propagate_when_host_address_ipv4 when output has empty list") if output_value.empty?
-
output_value
-
else
-
[output_value]
-
end
-
-
value = nil
-
if input_semantic_type().is_array?
-
# cartesian product with host_address
-
value = output_v.map{|host_address|input_value.map{|input_item|input_item.merge("host_address" => host_address)}}.flatten
-
else #not input_semantic_type().is_array?
-
raise Error.new("propagate_when_host_address_ipv4 does not support input scalar and output array with size > 1") if output_value.size > 1
-
value = output_v.first.merge("host_address" => input_value["host_address"])
-
end
-
{:value_derived => value}
-
end
-
-
1
def propagate_when_sap_conn__l4__db()
-
ret_cartesian_product()
-
end
-
-
1
def propagate_when_sap_config_conn__db
-
ret_cartesian_product()
-
end
-
-
1
def propagate_when_select_one()
-
raise Error::NotImplemented.new("propagate_when_select_one when input has more than one elements") if output_value() and output_value().size > 1
-
{:value_derived => output_value ? output_value().first : nil}
-
end
-
-
1
def ret_cartesian_product()
-
output_v =
-
if output_semantic_type().is_array?
-
raise Error::NotImplemented.new("cartesian_product when output has empty list") if output_value.empty?
-
output_value
-
else
-
[output_value]
-
end
-
-
value = nil
-
if input_semantic_type().is_array?
-
value = Array.new
-
output_v.each do |sap_config|
-
value += input_value.map{|input_item|input_item.merge(sap_config)}
-
end
-
else #not input_semantic_type().is_array?
-
raise Error.new("cartesian_product does not support input scalar and output array with size > 1") if output_value.size > 1
-
value = input_value.merge(output_v.first)
-
end
-
{:value_derived => value}
-
end
-
-
#########instance var access fns
-
1
attr_reader :function
-
end
-
end; end
-
-
1
module XYZ
-
1
class AttributeOverride < Model
-
end
-
end
-
# TODO: deprecate when get all this logic in ModuleLocation::Target
-
# TODO: putting version defaults in now; may move to separate file or rename to branch_names_and_versions
-
1
module DTK
-
1
VersionFieldDefault = 'master'
-
-
1
module BranchNamesMixin
-
1
def has_default_version?()
-
version = update_object!(:version)[:version]
-
version.nil? or (version == VersionFieldDefault)
-
end
-
-
1
protected
-
1
def workspace_branch_name(project)
-
self.class.workspace_branch_name(project,self[:version])
-
end
-
end
-
1
module BranchNamesClassMixin
-
1
def version_field_default()
-
35
VersionFieldDefault
-
end
-
-
1
def version_field(version)
-
version || VersionFieldDefault
-
end
-
-
1
def version_from_version_field(version_field)
-
unless version_field == VersionFieldDefault
-
ModuleVersion.ret(version_field)
-
end
-
end
-
-
# TODO: deprecate
-
-
1
def workspace_branch_name(project,version=nil)
-
# Log.info_pp(["#TODO: ModuleBranch::Location: deprecate workspace_branch_name direct call",caller[0..4]])
-
ModuleBranch::Location::Server::Local::workspace_branch_name(project,version)
-
end
-
end
-
end
-
1
files =
-
[
-
'model_def_processor',
-
'view_meta_processor',
-
'clone',
-
'user',
-
'meta'
-
]
-
1
r8_nested_require('component',files)
-
1
r8_require('branch_names')
-
1
module DTK
-
1
class Component < Model
-
1
r8_nested_require('component','get_method')
-
1
r8_nested_require('component','template')
-
1
r8_nested_require('component','instance')
-
1
r8_nested_require('component','dependency')
-
1
r8_nested_require('component','test')
-
1
r8_nested_require('component','resource_matching')
-
1
r8_nested_require('component','include_module')
-
1
include GetMethod::Mixin
-
1
extend GetMethod::ClassMixin
-
1
include Dependency::Mixin
-
1
extend Dependency::ClassMixin
-
1
include TemplateMixin
-
1
include ComponentModelDefProcessor
-
1
include ComponentViewMetaProcessor
-
1
include ComponentClone
-
1
extend ComponentUserClassMixin
-
1
extend ComponentMetaClassMixin
-
1
extend BranchNamesClassMixin
-
1
include BranchNamesMixin
-
-
1
set_relation_name(:component,:component)
-
1
def self.common_columns()
-
[
-
5
:id,
-
:group_id,
-
:display_name,
-
:name,
-
:external_ref,
-
:basic_type,
-
:type,
-
:component_type,
-
:specific_type,
-
:extended_base,
-
:extension_type,
-
:description,
-
:implementation_id,
-
:only_one_per_node,
-
:assembly_id,
-
:version,
-
:config_agent_type,
-
:ancestor_id,
-
:library_id,
-
:node_id,
-
:project_id,
-
:ui
-
]
-
end
-
-
1
def self.check_valid_id(model_handle,id,context={})
-
# TODO: put in check to make sure component instance and not a compoennt template
-
filter = [:eq,:id,id]
-
unless context.empty?
-
if assembly_id = context[:assembly_id]
-
filter = [:and,filter,[:eq,:assembly_id,assembly_id]]
-
else
-
raise Error.new("Unexepected context (#{context.inspect})")
-
end
-
end
-
check_valid_id_helper(model_handle,id,filter)
-
end
-
-
# just used for component instances; assumes that there is a node prefix in name
-
1
def self.name_to_id(model_handle,name,context={})
-
if context.empty?
-
return name_to_id_default(model_handle,name)
-
end
-
if assembly_id = context[:assembly_id]
-
display_name = Component.display_name_from_user_friendly_name(name)
-
node_name,cmp_type,cmp_title = ComponentTitle.parse_component_display_name(display_name,:node_prefix => true)
-
unless node_name
-
raise ErrorUsage.new("Ill-formed name for component (#{name}); it should have form NODE/CMP or NODE/MOD::CMP")
-
end
-
sp_hash = {
-
:cols => [:id,:node],
-
:filter => [:and,Component::Instance.filter(cmp_type,cmp_title), [:eq,:assembly_id,assembly_id]]
-
}
-
name_to_id_helper(model_handle,name,sp_hash.merge(:post_filter => lambda{|r|r[:node][:display_name] == node_name}))
-
else
-
raise Error.new("Unexepected context (#{context.inspect})")
-
end
-
end
-
-
1
def get_node()
-
get_obj_helper(:node)
-
end
-
-
1
def self.pending_changes_cols()
-
1
[:id,:node_for_state_change_info,:display_name,:basic_type,:external_ref,:node_node_id,:only_one_per_node,:extended_base_id,:implementation_id,:group_id]
-
end
-
-
# TODO: need to maintain relationship fro maintainability
-
1
def self.common_real_columns()
-
[
-
:id,
-
:display_name,
-
:extension_type,
-
:specific_type,
-
:type,
-
:component_type,
-
:ancestor_id,
-
:extended_base,
-
:implementation_id,
-
:assembly_id,
-
:ui,
-
:basic_type,
-
:only_one_per_node,
-
:version,
-
:external_ref,
-
:node_node_id,
-
:project_project_id,
-
:library_library_id
-
]
-
end
-
-
-
1
def copy_as_assembly_template()
-
ret = id_handle().create_object(:model_name => :assembly_template)
-
each{|k,v|ret[k]=v}
-
ret
-
end
-
1
def copy_as_assembly_instance()
-
ret = id_handle().create_object(:model_name => :assembly_instance)
-
each{|k,v|ret[k]=v}
-
ret
-
end
-
-
# MOD_RESTRUCT: TODO: see if this is what is wanted; now returning what is used in implementation and module branch fields
-
1
def self.default_version()
-
version_field_default()
-
end
-
-
### display name functions
-
1
def self.display_name_from_user_friendly_name(user_friendly_name)
-
# user_friendly_name.gsub(/::/,"__")
-
# using sub instead of gsub because we need only first :: to change to __
-
# e.g. we have cmp "mysql::bindings::java" we want "mysql__bindings::java"
-
user_friendly_name.sub(/::/,"__")
-
end
-
-
# TODO: these methods in this section need to be cleaned up and also possibly partitioned into Component::Instance and Component::Template
-
1
def display_name_print_form(opts={})
-
cols_to_get = [:component_type,:display_name]
-
unless opts[:without_version]
-
cols_to_get += [:version]
-
end
-
update_object!(*cols_to_get)
-
component_type = component_type_print_form()
-
-
# handle version
-
ret =
-
if opts[:without_version] or has_default_version?()
-
component_type
-
else
-
self.class.name_with_version(component_type,self[:version])
-
end
-
-
# handle component title
-
if title = ComponentTitle.title?(self)
-
ret = ComponentTitle.print_form_with_title(ret,title)
-
end
-
-
if opts[:namespace_prefix]
-
if cmp_namespace = self[:namespace]
-
ret = "#{cmp_namespace}:#{ret}"
-
end
-
end
-
-
if opts[:node_prefix]
-
if node = get_node()
-
ret = "#{node[:display_name]}/#{ret}"
-
end
-
end
-
ret
-
end
-
-
1
def self.name_with_version(name,version)
-
if version.kind_of?(ModuleVersion::Semantic)
-
"#{name}(#{version})"
-
else
-
name
-
end
-
end
-
-
1
def self.ref_with_version(ref,version)
-
"#{ref}__#{version}"
-
end
-
-
1
def self.module_name(component_type)
-
component_type.gsub(/__.+$/,'')
-
end
-
-
1
NamespaceDelim = ':'
-
1
def self.display_name_print_form(display_name,opts=Opts.new)
-
ret =
-
if opts[:no_module_name]
-
display_name.gsub(/^.+__/,"")
-
else
-
display_name.gsub(/__/,"::")
-
end
-
-
if namespace = opts[:namespace]
-
ret = "#{namespace}#{NamespaceDelim}#{ret}"
-
end
-
-
ret
-
end
-
-
1
def self.component_type_print_form(component_type,opts=Opts.new)
-
if opts[:no_module_name]
-
component_type.gsub(/^.+__/,"")
-
else
-
component_type.gsub(/__/,"::")
-
end
-
end
-
1
def component_type_print_form()
-
self.class.component_type_print_form(get_field?(:component_type))
-
end
-
-
1
def convert_to_print_form!()
-
update_object!(:display_name,:version)
-
component_type = component_type_print_form()
-
self[:display_name] = self.class.display_name_print_form(self[:display_name],{:namespace => self[:namespace]})
-
if has_default_version?()
-
self[:version] = nil
-
end
-
self
-
end
-
-
### end: display name functions
-
-
### virtual column defs
-
1
def name()
-
self[:display_name]
-
end
-
1
def node_id()
-
self[:node_node_id]
-
end
-
1
def project_id()
-
self[:project_project_id]
-
end
-
1
def library_id()
-
self[:library_library_id]
-
end
-
1
def config_agent_type()
-
cmp_external_ref_type = (self[:external_ref]||{})[:type]
-
case cmp_external_ref_type
-
when "chef_recipe" then "chef"
-
when "puppet_class","puppet_definition" then "puppet"
-
when "ruby_function" then "dtk_provider"
-
end
-
end
-
-
1
def instance_extended_base_id()
-
extended_base_id(:is_instance => true)
-
end
-
# TODO: expiremting with implementing this 'local def differently
-
1
def extended_base_id(opts={})
-
if self[:extended_base] and self[:implementation_id] and (self[:node_node_id] or not opts[:is_instance])
-
sp_hash = {
-
:cols => [:id],
-
:filter => [:and, [:eq, :implementation_id, self[:implementation_id]],
-
[:eq, :node_node_id, self[:node_node_id]],
-
[:eq, :component_type, self[:extended_base]]]
-
}
-
ret = Model.get_objects_from_sp_hash(model_handle,sp_hash).first[:id]
-
else
-
base_sp_hash = {
-
:model_name => :component,
-
:cols => [:implementation_id,:extended_base,:node_node_id]
-
}
-
join_array =
-
[{
-
:model_name => :component,
-
:alias => :base_component,
-
:join_type => :inner,
-
:join_cond => {
-
:implementation_id => :component__implementation_id,
-
:component_node_node_id => :component__node_node_id,
-
:component_type => :component__extended_base},
-
:cols => [:id,:implementation_id,:component_type]
-
}]
-
ret = Model.get_objects_from_join_array(model_handle,base_sp_hash,join_array).first[:base_component][:id]
-
end
-
self[:extended_base_id] = ret
-
end
-
-
1
def view_def_key()
-
self[:view_def_ref]||self[:component_type]||self[:id]
-
end
-
-
1
def most_specific_type()
-
self[:specific_type]||self[:basic_type]
-
end
-
-
1
def link_defs_external()
-
LinkDefsExternal.find!(self)
-
end
-
1
def connectivity_profile_internal()
-
(self[:link_defs]||{})["internal"] || LinkDefsInternal.find(self[:component_type])
-
end
-
-
1
def multiple_instance_ref()
-
(self[:ref_num]||1) - 1
-
end
-
-
1
def containing_datacenter()
-
(self[:datacenter_direct]||{})[:display_name]||
-
(self[:datacenter_node]||{})[:display_name]||
-
(self[:datacenter_node_group]||{})[:display_name]
-
end
-
-
# TODO: write as sql fn for efficiency
-
1
def has_pending_change()
-
((self[:state_change]||{})[:count]||0) > 0 or ((self[:state_change2]||{})[:count]||0) > 0
-
end
-
-
#######################
-
######### Model apis
-
-
1
def add_config_file(file_name,file_content)
-
# TODO: may check first that object does not have already a config file with same name
-
parent_col = DB.parent_field(:component,:file_asset)
-
-
create_row = {
-
:ref => file_name,
-
:type => "config_file",
-
:file_name => file_name,
-
:display_name => file_name,
-
parent_col => id(),
-
:content => file_content
-
}
-
-
file_asset_mh = id_handle().create_childMH(:file_asset)
-
Model.create_from_row(file_asset_mh,create_row)
-
end
-
-
-
1
def get_augmented_link_defs()
-
ndx_ret = Hash.new
-
get_objs(:cols => [:link_def_links]).each do |r|
-
link_def = r[:link_def]
-
pntr = ndx_ret[link_def[:id]] ||= link_def.merge(:link_def_links => Array.new)
-
pntr[:link_def_links] << r[:link_def_link]
-
end
-
ret = ndx_ret.values()
-
ret.each{|r|r[:link_def_links].sort!{|a,b|a[:position] <=> b[:position]}}
-
ret
-
end
-
-
1
def get_config_file(file_name)
-
sp_hash = {
-
:model_name => :file_asset,
-
:filter => [:and, [:eq, :file_name, file_name], [:eq, :type, "config_file"]],
-
:cols => [:id,:content]
-
}
-
get_children_from_sp_hash(:file_asset,sp_hash).first
-
end
-
1
def get_config_files(opts={}) # opts: {:include_content => true} means include content, otherwise just ids and file names returned
-
cols = [:id,:file_name]
-
cols << :content if opts[:include_content]
-
sp_hash = {
-
:model_name => :file_asset,
-
:filter => [:eq, :type, "config_file"],
-
:cols => cols
-
}
-
get_children_from_sp_hash(:file_asset,sp_hash)
-
end
-
-
1
def self.clear_dynamic_attributes_and_their_dependents(cmp_idhs)
-
dynamic_attrs = get_objs_in_set(cmp_idhs,{:cols => [:dynamic_attributes]}).map{|r|r[:attribute]}
-
Attribute.clear_dynamic_attributes_and_their_dependents(dynamic_attrs)
-
end
-
-
1
def get_virtual_attribute(attribute_name,cols,field_to_match=:display_name)
-
sp_hash = {
-
:model_name => :attribute,
-
:filter => [:eq, field_to_match, attribute_name],
-
:cols => cols
-
}
-
get_children_from_sp_hash(:attribute,sp_hash).first
-
end
-
-
1
def is_extension?()
-
return false if self.kind_of?(Assembly)
-
Log.error("this should not be called if :extended_base is not set") unless self.has_key?(:extended_base)
-
self[:extended_base] ? true : false
-
end
-
-
# looks at
-
# 1) directly directly connected attributes
-
# 2) if extension then attributes on teh extenion's base
-
# 3) if base then extensions on all its attributes (TODO: NOTE: in which case multiple_instance_clause may be needed)
-
1
def self.get_virtual_attributes__include_mixins(attrs_to_get,cols,field_to_match=:display_name)
-
ret = Hash.new
-
# TODO: may be able to avoid this loop
-
attrs_to_get.each do |component_id,hash_value|
-
attr_info = hash_value[:attribute_info]
-
component = hash_value[:component]
-
attr_names = attr_info.map{|a|a[:attribute_name].to_s}
-
rows = component.get_virtual_attributes__include_mixins(attr_names,cols,field_to_match)
-
rows.each do |attr|
-
attr_name = attr[field_to_match]
-
ret[component_id] ||= Hash.new
-
ret[component_id][attr_name] = attr
-
end
-
end
-
ret
-
end
-
-
1
def get_virtual_attributes__include_mixins(attribute_names,cols,field_to_match=:display_name,multiple_instance_clause=nil)
-
is_extension?() ?
-
get_virtual_attributes_aux_extension(attribute_names,cols,field_to_match,multiple_instance_clause) :
-
get_virtual_attributes_aux_base(attribute_names,cols,field_to_match,multiple_instance_clause)
-
end
-
-
1
def self.ret_component_with_namespace_for_node(cmp_mh, cmp_name, node_id, namespace, assembly)
-
ret_cmp, match_cmps = nil, []
-
display_name = display_name_from_user_friendly_name(cmp_name)
-
# display_name = cmp_name.gsub(/::/,"__")
-
sp_hash = {
-
:cols => [:id, :display_name, :module_branch_id, :type, :ref, :augmented_with_module_info],
-
:filter => [:and,
-
[:eq, :display_name, display_name],
-
# [:eq, :type, 'instance'],
-
# [:eq, :project_project_id, nil],
-
[:eq, :node_node_id, node_id]]
-
}
-
cmps = Model.get_objs(cmp_mh,sp_hash,:keep_ref_cols=>true)
-
-
if namespace
-
cmps.select!{|c| (c[:namespace] && c[:namespace][:display_name] == namespace)}
-
ret_cmp = cmps.first
-
else
-
return cmps.first if cmps.size == 1
-
-
opts = Opts.new(:with_namespace => true)
-
cmp_modules_for_assembly = assembly.list_component_modules(opts)
-
-
cmp_modules_for_assembly.each do |cmp_mod|
-
cmps.each do |cmp|
-
if cmp_module = cmp[:component_module]
-
match_cmps << cmp if cmp_module[:id] == cmp_mod[:id]
-
end
-
end
-
-
raise ErrorUsage.new("Multiple components matching component name you provided. Please use namespace:component format to delete component!") if match_cmps.size > 1
-
ret_cmp = match_cmps.first
-
end
-
end
-
-
ret_cmp
-
end
-
-
1
def self.get_component_instances_related_by_mixins(components,cols)
-
return Array.new if components.empty?
-
sample_cmp = components.first
-
component_mh = sample_cmp.model_handle()
-
# use base cmp id as equivalence class and find all members of equivalence class to find what each related component is
-
# associated with
-
cmp_id_to_equiv_class = Hash.new
-
equiv_class_members = Hash.new
-
ext_cmps = Array.new
-
base_cmp_info = Array.new
-
components.each do |cmp|
-
id = cmp[:id]
-
if cmp[:extended_base]
-
raise Error.new("cmp[:implementation_id] must be set") unless cmp[:implementation_id]
-
ext_cmps << cmp
-
extended_base_id = cmp[:extended_base_id]
-
base_cmp_info << {:id => extended_base_id, :node_node_id => cmp[:node_node_id], :extended_base => cmp[:extended_base], :implementation_id => cmp[:implementation_id]}
-
cmp_id_to_equiv_class[id] = (equiv_class_members[extended_base_id] ||= Array.new) << id
-
else
-
base_cmp_info << {:id => cmp[:id]}
-
cmp_id_to_equiv_class[id] = (equiv_class_members[id] ||= Array.new) << id
-
end
-
end
-
-
indexed_ret = Hash.new
-
get_components_related_by_mixins_from_extension(component_mh,ext_cmps,cols).each do |found_base_cmp|
-
id = found_base_cmp[:id]
-
# if found_base_cmp in components dont put in result
-
unless cmp_id_to_equiv_class[id]
-
indexed_ret[id] = found_base_cmp.merge(:assoc_component_ids => equiv_class_members[id])
-
end
-
end
-
-
get_components_related_by_mixins_from_base(component_mh,base_cmp_info,cols).each do |found_ext_cmp|
-
id = found_ext_cmp[:id]
-
# if found_ext_cmp in components dont put in result
-
unless cmp_id_to_equiv_class[id]
-
indexed_ret[id] = found_ext_cmp.merge(:assoc_component_ids => equiv_class_members[found_ext_cmp[:extended_base_id]])
-
end
-
end
-
indexed_ret.values
-
end
-
-
1
def self.create_subclass_object(cmp,subclass_model_name=nil)
-
cmp && cmp.id_handle().create_object(:model_name => subclass_model_name||model_name_with_subclass()).merge(cmp)
-
end
-
-
1
def is_assembly?()
-
"composite" == get_field?(:type)
-
end
-
1
def assembly?(opts={})
-
if is_assembly?()
-
Assembly.create_assembly_subclass_object(self)
-
end
-
end
-
-
1
def get_component_i18n_label()
-
ret = get_stored_component_i18n_label?()
-
return ret if ret
-
i18n = get_i18n_mappings_for_models(:component)
-
i18n_string(i18n,:component,self[:display_name])
-
end
-
-
1
def get_attribute_i18n_label(attribute)
-
ret = get_stored_attribute_i18n_label?(attribute)
-
return ret if ret
-
i18n = get_i18n_mappings_for_models(:attribute,:component)
-
i18n_string(i18n,:attribute,attribute[:display_name],self[:component_type])
-
end
-
-
1
def update_component_i18n_label(label)
-
update_hash = {:id => self[:id], :i18n_labels => {i18n_language() => {"component" => label}}}
-
Model.update_from_rows(model_handle,[update_hash],:partial_value=>true)
-
end
-
1
def update_attribute_i18n_label(attribute_name,label)
-
update_hash = {:id => self[:id], :i18n_labels => {i18n_language() => {"attributes" => {attribute_name => label}}}}
-
Model.update_from_rows(model_handle,[update_hash],:partial_value=>true)
-
end
-
-
# self is an instance and it finds a library component
-
# multiple_instance_clause is used in case multiple extensions of same type and need to select particular one
-
# TODO: extend with logic for multiple_instance_clause
-
1
def get_extension_in_library(extension_type,cols=[:id,:display_name],multiple_instance_clause=nil)
-
base_sp_hash = {
-
:model_name => :implementation,
-
:filter => [:eq, :id, self[:implementation_id]],
-
:cols => [:id,:ancestor_id]
-
}
-
join_array =
-
[
-
{
-
:model_name => :component,
-
:alias => :library_template,
-
:join_type => :inner,
-
:filter => [:eq, :extension_type, extension_type.to_s],
-
:convert => true,
-
:join_cond => {:implementation_id => :implementation__ancestor_id},
-
:cols => Aux.array_add?(cols,:implementation_id)
-
}
-
]
-
rows = Model.get_objects_from_join_array(model_handle(:implementation),base_sp_hash,join_array)
-
Log.error("get extension library shoudl only match one component") if rows.size > 1
-
rows.first && rows.first[:library_template]
-
end
-
-
1
def get_containing_node_id()
-
return self[:node_node_id] if self[:node_node_id]
-
row = get_objects_from_sp_hash(:columns => [:node_node_id,:containing_node_id_info]).first
-
row[:node_node_id]||(row[:parent_component]||{})[:node_node_id]
-
end
-
-
####################
-
1
def save_view_in_cache?(type,user_context)
-
ViewDefProcessor.save_view_in_cache?(type,id_handle(),user_context)
-
end
-
-
### object processing and access functions
-
1
def get_component_with_attributes_unraveled(attr_filters={:hidden => true})
-
sp_hash = {:columns => [:id,:display_name,:component_type,:basic_type,:attributes,:i18n_labels]}
-
component_and_attrs = get_objects_from_sp_hash(sp_hash)
-
return nil if component_and_attrs.empty?
-
component = component_and_attrs.first.subset(:id,:display_name,:component_type,:basic_type,:i18n_labels)
-
component_attrs = {:component_type => component[:component_type],:component_name => component[:display_name]}
-
filtered_attrs = component_and_attrs.map do |r|
-
attr = r[:attribute]
-
attr.merge(component_attrs) if attr and not attribute_is_filtered?(attr,attr_filters)
-
end.compact
-
attributes = AttributeComplexType.flatten_attribute_list(filtered_attrs)
-
component.merge(:attributes => attributes)
-
end
-
-
1
private
-
1
def sub_item_model_names()
-
[:node,:component]
-
end
-
-
1
def self.get_components_related_by_mixins_from_extension(component_mh,extension_cmps,cols)
-
return Array.new if extension_cmps.empty?
-
base_ids = extension_cmps.map{|cmp|cmp[:instance_extended_base_id]}
-
sp_hash = {
-
:model_name => :component,
-
:filter => [:oneof, :id, base_ids],
-
:cols => Aux.array_add?(cols,[:id])
-
}
-
get_objects_from_sp_hash(component_mh,sp_hash)
-
end
-
-
1
def self.get_components_related_by_mixins_from_base(component_mh,base_cmp_info,cols)
-
return Array.new if base_cmp_info.empty?
-
filter =
-
if base_cmp_info.size == 1
-
extended_base_id_filter(base_cmp_info.first)
-
else
-
[:or] + base_cmp_info.map{|item|extended_base_id_filter(item)}
-
end
-
sp_hash = {
-
:model_name => :component,
-
:filter => filter,
-
:cols => Aux.array_add?(cols,[:id])
-
}
-
get_objects_from_sp_hash(component_mh,sp_hash)
-
end
-
-
1
def self.extended_base_id_filter(base_cmp_info_item)
-
if base_cmp_info_item[:extended_base]
-
[:and,[:eq, :implementation_id, base_cmp_info_item[:implementation_id]],
-
[:eq,:node_node_id,base_cmp_info_item[:node_node_id]],
-
[:eq,:extended_base, base_cmp_info_item[:extended_base]]]
-
else
-
[:eq, :id, base_cmp_info_item[:id]]
-
end
-
end
-
-
1
def get_virtual_attributes_aux_extension(attribute_names,cols,field_to_match=:display_name,multiple_instance_clause=nil)
-
component_id = self[:id]
-
base_id = self[:extended_base_id]
-
sp_hash = {
-
:model_name => :attribute,
-
:filter => [:and,
-
[:oneof, field_to_match, attribute_names],
-
[:oneof, :component_component_id, [component_id,base_id]]],
-
:cols => Aux.array_add?(cols,[:component_component_id,field_to_match])
-
}
-
attr_mh = model_handle().createMH(:attribute)
-
Model.get_objects_from_sp_hash(attr_mh,sp_hash)
-
end
-
-
1
def get_virtual_attributes_aux_base(attribute_names,cols,field_to_match=:display_name,multiple_instance_clause=nil)
-
raise Error.new("Should not be called unless :component_type and :implementation_id are set") unless self[:component_type] and self[:implementation_id]
-
component_id = self[:id]
-
base_sp_hash = {
-
:model_name => :component,
-
:filter => [:and,
-
[:eq, :node_node_id, self[:node_node_id]],
-
[:eq, :implementation_id, self[:implementation_id]],
-
[:or, [:eq, :extended_base, self[:component_type]],[:eq, :id, self[:id]]]],
-
:cols => [:id,:extended_base,:implementation_id]
-
}
-
join_array =
-
[{
-
:model_name => :attribute,
-
:convert => true,
-
:join_type => :inner,
-
:filter => [:oneof, field_to_match, attribute_names],
-
:join_cond => {:component_component_id => :component__id},
-
:cols => Aux.array_add?(cols,[:component_component_id,field_to_match])
-
}]
-
Model.get_objects_from_join_array(model_handle,base_sp_hash,join_array).map{|r|r[:attribute]}
-
end
-
-
# only filters if value is known
-
1
def attribute_is_filtered?(attribute,attr_filters)
-
return false if attr_filters.empty?
-
attr_filters.each{|k,v|return true if attribute[k] == v}
-
false
-
end
-
-
1
public
-
-
1
def get_view_meta(view_type,virtual_model_ref)
-
from_db = get_instance_layout_from_db(view_type)
-
virtual_model_ref.set_view_meta_info(from_db[:id],from_db[:updated_at]) if from_db
-
-
layout_def = (from_db||{})[:def] || Layout.create_def_from_field_def(get_field_def(),view_type)
-
create_view_meta_from_layout_def(view_type,layout_def)
-
end
-
-
1
def get_view_meta_info(view_type)
-
# TODO: can be more efficient (rather than using get_instance_layout_from_db can use something that returns most recent laypout id); also not sure whether if no db hit to return id()
-
from_db = get_instance_layout_from_db(view_type)
-
return [from_db[:id],from_db[:updated_at]] if from_db
-
[id(),Time.new()]
-
end
-
-
1
def get_layouts(view_type)
-
from_db = get_layouts_from_db(view_type)
-
return from_db unless from_db.empty?
-
Layout.create_and_save_from_field_def(id_handle(),get_field_def(),view_type)
-
get_layouts_from_db(view_type)
-
end
-
-
1
def add_layout(layout_info)
-
Layout.save(id_handle(),layout_info)
-
end
-
-
1
protected
-
1
def get_layouts_from_db(view_type,layout_vc=:layouts)
-
unprocessed_rows = get_objects_col_from_sp_hash({:columns => [layout_vc]},:layout)
-
# TODO: more efficient would be to use db sort
-
unprocessed_rows.select{|l|l[:type] == view_type.to_s}.sort{|a,b|b[:updated_at] <=> a[:updated_at]}
-
end
-
-
1
def get_instance_layout_from_db(view_type)
-
# TODO: more efficient would be to use db limit
-
instance_layout = get_layouts_from_db(view_type,:layouts).first
-
return instance_layout if instance_layout
-
instance_layout = get_layouts_from_db(view_type,:layouts_from_ancestor).first
-
return instance_layout if instance_layout
-
end
-
1
public
-
-
# TODO: wil be deperacted
-
1
def get_info_for_view_def()
-
sp_hash = {:columns => [:id,:display_name,:component_type,:basic_type,:attributes_view_def_info]}
-
component_and_attrs = get_objects_from_sp_hash(sp_hash)
-
return nil if component_and_attrs.empty?
-
component = component_and_attrs.first.subset_with_vcs(:id,:display_name,:component_type,:basic_type,:view_def_key)
-
# if component_and_attrs.first[:attribute] null there shoudl only be one element in component_and_attrs
-
return component.merge(:attributes => Array.new) unless component_and_attrs.first[:attribute]
-
opts = {:flatten_nil_value => true}
-
component.merge(:attributes => AttributeComplexType.flatten_attribute_list(component_and_attrs.map{|r|r[:attribute]},opts))
-
end
-
-
1
def get_attributes_unraveled(to_set={},opts={})
-
sp_hash = {
-
:filter => [:and,
-
[:eq, :hidden, false]],
-
:columns => [:id,:display_name,:component_component_id,:attribute_value,:semantic_type,:semantic_type_summary,:data_type,:required,:dynamic,:cannot_change,:port_type,:read_only]
-
}
-
raw_attributes = get_children_from_sp_hash(:attribute,sp_hash)
-
return Array.new if raw_attributes.empty?
-
if to_set.has_key?(:component_id)
-
sample = raw_attributes.first
-
to_set[:component_id] = sample[:component_component_id]
-
end
-
-
flattened_attr_list = AttributeComplexType.flatten_attribute_list(raw_attributes,opts)
-
i18n = get_i18n_mappings_for_models(:attribute)
-
flattened_attr_list.map do |a|
-
unless a[:hidden]
-
name = a[:display_name]
-
{
-
:id => a[:unraveled_attribute_id],
-
:name => name,
-
:value => a[:attribute_value],
-
:i18n => i18n_string(i18n,:attribute,name),
-
:is_readonly => a.is_readonly?
-
}
-
end
-
end.compact
-
end
-
-
1
def get_virtual_object_attributes(opts={})
-
to_set = {:component_id => nil}
-
attrs = get_attributes_unraveled(to_set)
-
vals = attrs.inject({:id=>to_set[:component_id]}){|h,a|h.merge(a[:name].to_sym => a[:value])}
-
if opts[:ret_ids]
-
ids = attrs.inject({}){|h,a|h.merge(a[:name].to_sym => a[:id])}
-
return [vals,ids]
-
end
-
vals
-
end
-
-
1
def add_model_specific_override_attrs!(override_attrs,target_obj)
-
# TODO: taking out below to accomidate fact that using ref to qialify whether chef or puppet
-
# TODO: think want to add way for components that can have many attributes to have this based on value of the
-
# attribut ethat serves as the key
-
# override_attrs[:display_name] ||= SQL::ColRef.qualified_ref
-
into_node = (target_obj.model_handle[:model_name] == :node)
-
override_attrs[:type] ||= (into_node ? "instance" : "template")
-
override_attrs[:updated] ||= false
-
end
-
-
###### Helper fns
-
1
def get_contained_attribute_ids(opts={})
-
parent_id = IDInfoTable.get_id_from_id_handle(id_handle)
-
nested_cmps = get_objects(ModelHandle.new(id_handle[:c],:component),nil,:parent_id => parent_id)
-
-
(get_directly_contained_object_ids(:attribute)||[]) +
-
(nested_cmps||[]).map{|cmp|cmp.get_contained_attribute_ids(opts)}.flatten()
-
end
-
-
# type can be :asserted, :derived or :value
-
1
def get_contained_attribute_values(type,opts={})
-
parent_id = IDInfoTable.get_id_from_id_handle(id_handle)
-
nested_cmps = get_objects(ModelHandle.new(id_handle[:c],:component),nil,:parent_id => parent_id)
-
-
ret = Hash.new
-
(nested_cmps||[]).each do |cmp|
-
values = cmp.get_contained_attribute_values(type,opts)
-
if values
-
ret[:component] ||= Hash.new
-
ret[:component][cmp.get_qualified_ref.to_sym] = values
-
end
-
end
-
dir_vals = get_direct_attribute_values(type,opts)
-
ret[:attribute] = dir_vals if dir_vals
-
ret
-
end
-
-
1
def get_direct_attribute_values(type,opts={})
-
parent_id = IDInfoTable.get_id_from_id_handle(id_handle)
-
attr_val_array = Model.get_objects(ModelHandle.new(c,:attribute),nil,:parent_id => parent_id)
-
-
return nil if attr_val_array.nil?
-
return nil if attr_val_array.empty?
-
ret = {}
-
attr_type = {:asserted => :value_asserted, :derived => :value_derived, :value => :attribute_value}[type]
-
attr_val_array.each do |attr|
-
v = {:value => attr[attr_type],:id => attr[:id]}
-
opts[:attr_include].each{|a|v[a]=attr[a]} if opts[:attr_include]
-
ret[attr.get_qualified_ref.to_sym] = v
-
end
-
ret
-
end
-
-
1
def get_objects_associated_nodes()
-
assocs = Model.get_objects(ModelHandle.new(@c,:assoc_node_component),:component_id => self[:id])
-
return Array.new if assocs.nil?
-
assocs.map{|assoc|Model.get_object(IDHandle[:c=>@c,:guid => assoc[:node_id]])}
-
end
-
-
1
def get_obj_with_common_cols()
-
common_cols = self.class.common_columns()
-
ret = get_objs(:cols => common_cols).first
-
ret.materialize!(common_cols)
-
end
-
-
1
def get_stored_attribute_i18n_label?(attribute)
-
return nil unless self[:i18n_labels]
-
((self[:i18n_labels][i18n_language()]||{})["attributes"]||{})[attribute[:display_name]]
-
end
-
1
def get_stored_component_i18n_label?()
-
return nil unless self[:i18n_labels]
-
((self[:i18n_labels][i18n_language()]||{})["component"]||{})[self[:display_name]]
-
end
-
-
end
-
end
-
-
# TODO: determine what in this file is deprecated
-
1
module DTK
-
1
module ComponentClone
-
1
def clone_pre_copy_hook_into_node(node,opts={})
-
workspace_cmp = self
-
# check constraints
-
unless opts[:no_constraint_checking]
-
if constraints = workspace_cmp.get_constraints!(:update_object => true)
-
target = {"target_node_id_handle" => node.id_handle_with_auth_info()}
-
constraint_opts = {:raise_error_when_error_violation => true, :update_object => workspace_cmp}
-
constraints.evaluate_given_target(target,constraint_opts)
-
end
-
end
-
workspace_cmp
-
end
-
-
1
def determine_cloned_components_parent(specified_target_idh)
-
# TODO: may deprecate if not using; previously mapped extensions to parents; now putting them with node as tehir parent
-
return specified_target_idh if SubComponentComponentMapping.empty?
-
cmp_fs = FieldSet.opt([:id,:display_name,:component_type],:component)
-
specified_target_id = specified_target_idh.get_id()
-
cmp_ds = Model.get_objects_just_dataset(model_handle,{:id => id()},cmp_fs)
-
mapping_ds = SQL::ArrayDataset.create(self.class.db,SubComponentComponentMapping,model_handle.createMH(:mapping))
-
-
first_join_ds = cmp_ds.graph(:inner,mapping_ds,{:component => :component_type})
-
-
parent_cmp_ds = Model.get_objects_just_dataset(model_handle,{:node_node_id => specified_target_idh.get_id()},cmp_fs)
-
-
final_join_ds = first_join_ds.graph(:inner,parent_cmp_ds,{:component_type => :parent},{:convert => true})
-
-
target_info = final_join_ds.all().first
-
return specified_target_idh unless target_info
-
target_info[:component2].id_handle()
-
end
-
-
1
SubComponentComponentMapping =
-
[
-
# {:component => "postgresql__db", :parent => "postgresql__server"}
-
]
-
-
1
def clone_post_copy_hook(clone_copy_output,opts={})
-
component_idh = clone_copy_output.id_handles.first
-
add_needed_sap_attributes(component_idh)
-
parent_action_id_handle = id_handle().get_top_container_id_handle(:datacenter)
-
StateChange.create_pending_change_item(:new_item => component_idh, :parent => parent_action_id_handle)
-
end
-
-
1
def source_clone_info_opts()
-
raise Error.new("component#source_clone_info_opts is deprecated")
-
{:ret_new_obj_with_cols => [:id,:implementation_id,:component_type,:version,:ancestor_id]}
-
end
-
-
1
def add_needed_sap_attributes(component_idh)
-
sp_hash = {
-
:filter => [:and, [:oneof, :basic_type, BasicTypeInfo.keys]],
-
:columns => [:id, :display_name,:basic_type]
-
}
-
component = component_idh.get_objects_from_sp_hash(sp_hash).first
-
return nil unless component
-
-
basic_type_info = BasicTypeInfo[component[:basic_type]]
-
sap_dep = basic_type_info[:sap_dependency]
-
-
sap_info = component.get_objects_from_sp_hash(:columns => [:id, :display_name, sap_dep]).first
-
unless sap_info
-
Log.error("error in finding sap dependencies for component #{component_idh}")
-
return nil
-
end
-
-
sap_config_attr = sap_info[:attribute]
-
par_attr = sap_info[:parent_attribute]
-
node = sap_info[:node]
-
-
sap_val = basic_type_info[:fn].call(sap_config_attr[:attribute_value],par_attr[:attribute_value])
-
sap_attr_row = Aux::hash_subset(basic_type_info,[{:sap => :ref},{:sap => :display_name},:description,:semantic_type,:semantic_type_summary])
-
sap_attr_row.merge!(
-
:component_component_id => component[:id],
-
:value_derived => sap_val,
-
:is_port => true,
-
:hidden => true,
-
:data_type => "json")
-
-
attr_mh = component_idh.createMH(:model_name => :attribute, :parent_model_name => :component)
-
sap_attr_idh = self.class.create_from_row(attr_mh,sap_attr_row, :convert => true)
-
-
return nil unless sap_attr_idh
-
AttributeLink.create_links_sap(basic_type_info,sap_attr_idh,sap_config_attr.id_handle(),par_attr.id_handle(),node.id_handle())
-
end
-
-
1
protected
-
1
# Derive the SAP db value: overlay each parent value onto the config value,
# returning one merged hash per entry in par_vals.
def self.compute_sap_db(sap_config_val,par_vals)
  # TODO: check if it is this simple; also may not need and propagate as byproduct of adding a link
  par_vals.each_with_object([]) do |parent_val, merged|
    merged << sap_config_val.merge(parent_val)
  end
end
-
1
private
-
# TODO: some of these are redundant with what is in sap_dependency_X like "sap__l4" and "sap__db"
-
1
BasicTypeInfo = {
-
"database" => {
-
:sap_dependency => :sap_dependency_database,
-
:sap => "sap__db",
-
:sap_config => "sap_config__db",
-
:sap_config_fn_name => "sap_config_conn__db",
-
:parent_attr => "sap__l4",
-
:parent_fn_name => "sap_conn__l4__db",
-
:semantic_type => {":array" => "sap__db"}, #TODO: need the => {"application" => service qualification)
-
:semantic_type_summary => "sap__db",
-
:description => "DB access point",
-
:fn => lambda{|sap_config,par|compute_sap_db(sap_config,par)}
-
}
-
}
-
end
-
end
-
2
module DTK; class Component
-
1
module Dependency
-
1
# Loads the given components with their dependency rows and returns one
# entry per component, with all of its dependencies aggregated into an
# array under :dependencies. Returns [] for an empty input.
def self.get_nested_dependencies(component_idhs)
-
ret = Array.new
-
return ret if component_idhs.empty?
-
cmp_cols = [:id,:group_id,:only_one_per_node,:component_type,:extended_base,:implementation_id]
-
sp_hash = {
-
:cols => [:dependencies] + cmp_cols,
-
:filter => [:oneof,:id,component_idhs.map{|idh|idh.get_id()}]
-
}
-
cmp_mh = component_idhs.first.createMH()
-
# if rows agree on component id then all attributes are the same except for dependencies
ndx_ret = Hash.new
-
# aggregate dependencies under the component it is nested on
Component.get_objs(cmp_mh,sp_hash,:keep_ref_cols => true).each do |aug_cmp|
-
# confusing that from Component.get_objs :dependencies will be a hash and we are using the same field as an array
dep = aug_cmp[:dependencies]
-
pntr = ndx_ret[aug_cmp[:id]] ||= aug_cmp.merge(:dependencies => Array.new)
-
pntr[:dependencies] << dep if dep
-
end
-
ndx_ret.values
-
end
-
-
1
module ClassMixin
-
# returns hash with ndx component_id and keys :constraints, :component
-
# opts can have key :when_evaluated
-
1
def get_ndx_constraints(component_idhs,opts={})
-
ret = Hash.new
-
return ret if component_idhs.empty?
-
cmp_cols = [:id,:group_id,:only_one_per_node,:component_type,:extended_base,:implementation_id]
-
# one Constraint per dependency row, indexed by component id
ret = Dependency.get_nested_dependencies(component_idhs).inject(Hash.new) do |h,r|
-
constraints = r[:dependencies].map{|dep|Constraint.create(dep)}
-
h.merge(r[:id] => {:constraints => constraints, :component => r.slice(*cmp_cols)})
-
end
-
ret.each_value do |r|
-
cmp = r[:component]
-
unless opts[:when_evaluated] == :after_cmp_added
-
# these should only be evaluated before the component is evaluated
r[:constraints] << Constraint::Macro.only_one_per_node(cmp[:component_type]) if cmp[:only_one_per_node]
-
r[:constraints] << Constraint::Macro.base_for_extension(cmp) if cmp[:extended_base]
-
end
-
end
-
ret
-
end
-
end
-
1
module Mixin
-
# TODO: may deprecate this to be in terms of get_ndx_constraints
-
1
# Builds a Constraints object (AND of all constraints) for this component
# from its dependency rows plus the only_one_per_node/extended_base macros.
# opts[:update_object] also copies the fetched component columns onto self.
def get_constraints!(opts={})
-
# TODO: may see if precalculating more is more efficient
cmp_cols = [:only_one_per_node,:component_type,:extended_base,:implementation_id]
-
rows = get_objs(:cols => [:dependencies] + cmp_cols)
-
# NOTE(review): assumes at least one row is returned; cmp_info is nil otherwise
cmp_info = rows.first # just picking first since component info is the same for all rows
-
cmp_cols.each{|col|self[col] = cmp_info[col]} if opts[:update_object]
-
-
constraints = rows.map{|r|Constraint.create(r[:dependencies]) if r[:dependencies]}.compact
-
constraints << Constraint::Macro.only_one_per_node(cmp_info[:component_type]) if cmp_info[:only_one_per_node]
-
constraints << Constraint::Macro.base_for_extension(cmp_info) if cmp_info[:extended_base]
-
-
return Constraints.new() if constraints.empty?
-
Constraints.new(:and,constraints)
-
end
-
end
-
end
-
end; end
-
-
# TODO: will move get methods that will not be deprecating to here or some file underneath a file directory
-
2
module DTK; class Component
-
1
module GetMethod
-
1
module Mixin
-
1
def get_augmented_link_defs()
-
ndx_ret = Hash.new
-
get_objs(:cols => [:link_def_links]).each do |r|
-
link_def = r[:link_def]
-
pntr = ndx_ret[link_def[:id]] ||= link_def.merge(:link_def_links => Array.new)
-
pntr[:link_def_links] << r[:link_def_link]
-
end
-
ret = ndx_ret.values()
-
ret.each{|r|r[:link_def_links].sort!{|a,b|a[:position] <=> b[:position]}}
-
ret
-
end
-
-
1
def get_node()
-
get_obj_helper(:node)
-
end
-
end
-
-
1
module ClassMixin
-
1
def get_include_modules(component_idhs,opts={})
-
get_component_children(component_idhs,IncludeModule,:component_include_module,opts)
-
end
-
-
1
def get_attributes(component_idhs,opts={})
-
get_component_children(component_idhs,::DTK::Attribute,:attribute,opts)
-
end
-
-
1
def get_implementations(component_idhs)
-
ret = Array.new
-
return ret if component_idhs.empty?
-
mh = component_idhs.first.createMH()
-
get_objs(mh,sp_hash([:implementation],:id, component_idhs)).map{|r|r[:implementation]}
-
end
-
-
1
private
-
1
# Fetch child rows of the given model type for a set of components.
#
# component_idhs   - array of component id handles; returns [] when empty
# child_class      - model class whose common_columns() supplies default cols
# child_model_name - e.g. :attribute, :component_include_module
# opts             - :cols (override column list), :cols_plus (appended cols)
def get_component_children(component_idhs,child_class,child_model_name,opts={})
  ret = Array.new
  return ret if component_idhs.empty?
  mh = component_idhs.first.create_childMH(child_model_name)
  cols = opts[:cols] || child_class.common_columns()
  if cols_plus = opts[:cols_plus]
    # use the captured local rather than re-reading opts (was opts[:cols_plus])
    cols = (cols + cols_plus).uniq
  end
  get_objs(mh,sp_hash(cols,mh.parent_id_field_name,component_idhs))
end
-
-
1
# Build a standard select hash: the requested columns plus a filter that
# matches any of the given component id handles on cmp_id_field.
def sp_hash(cols,cmp_id_field,component_idhs)
  ids = component_idhs.map { |id_handle| id_handle.get_id() }
  { :cols => cols, :filter => [:oneof, cmp_id_field, ids] }
end
-
end
-
end
-
end; end
-
-
-
2
module DTK; class Component
-
1
class IncludeModule < Model
-
1
# Columns fetched by default when loading include-module rows.
def self.common_columns()
  %i[id group_id display_name version_constraint]
end
-
-
1
def module_name()
-
get_field?(:display_name)
-
end
-
-
# For all components in component_idhs, this method returns its implementation plus
# does recursive analysis to follow the component's includes to find other components that must be included also
1
def self.get_matching_implementations(assembly_instance,component_idhs)
-
# TODO: check that Component.get_implementations is consistent with what ModuleRefs::Lock returns
-
# with respect to namespace resolution
-
ret = Component.get_implementations(component_idhs)
-
include_modules = get_include_modules(component_idhs)
-
return ret if include_modules.empty?()
-
-
unless assembly_instance
-
Log.error("Unexpected that assembly_instance is nil in IncludeModule.get_matching_implementations; not putting in includes")
-
return ret
-
end
-
-
# Add to the impls in ret the ones gotten by following the include module links
-
# using ndx_ret to get rid of duplicates
-
# includes are indexed on components, so at first level get component modules, but then can only see what component modules
-
# are includes using ModuleRefs::Lock
-
ndx_ret = ret.inject(Hash.new){|h,impl|h.merge(impl.id => impl)}
-
locked_module_refs = ModuleRefs::Lock.get(assembly_instance,:types=>[:locked_dependencies,:locked_branch_shas])
-
included_impls = locked_module_refs.matching_impls_with_children(include_modules.map{|im|im.module_name()})
-
ndx_ret = included_impls.inject(ndx_ret){|h,impl|h.merge(impl.id => impl)}
-
ndx_ret.values
-
end
-
-
1
private
-
1
def self.get_include_modules(component_idhs)
-
Component.get_include_modules(component_idhs,:cols => common_columns())
-
end
-
end
-
end; end
-
-
-
2
module DTK; class Component
-
1
class Instance < self
-
-
1
r8_nested_require('instance','interpreted')
-
-
1
def self.get_objs(mh,sp_hash,opts={})
-
# TODO: might want to change to just :model_name == component_instance
-
if [:component,:component_instance].include?(mh[:model_name])
-
super(mh.createMH(:component),sp_hash,opts).map{|cmp|create_from_component(cmp)}
-
else
-
super
-
end
-
end
-
-
1
def self.create_from_component(cmp)
-
cmp && cmp.id_handle().create_object(:model_name => :component_instance).merge(cmp)
-
end
-
-
1
# Column set used when listing component instances.
def self.component_list_fields()
  %i[id group_id display_name component_type implementation_id basic_type
     version only_one_per_node external_ref node_node_id extended_base ancestor_id]
end
-
-
1
def self.get_matching?(node_idh,component_type,component_title)
-
sp_hash = {
-
:cols => [:id,:display_name,:component_type,:ref],
-
:filter => [:and,[:eq,:node_node_id,node_idh.get_id()],
-
filter(component_type,component_title)
-
]
-
}
-
cmp = Model.get_obj(node_idh.createMH(:component),sp_hash)
-
cmp && create_from_component(cmp)
-
end
-
-
1
def has_title?()
-
ComponentTitle.title?(self)
-
end
-
-
1
def self.filter(component_type,component_title=nil)
-
[:eq,:display_name,ComponentTitle.display_name_with_title?(component_type,component_title)]
-
end
-
-
1
# Set the title attribute (default attribute name 'name') of the component to
# component_title, marking it immutable and instance-valued.
# Logs and returns nil if the attribute cannot be found; logs (but still
# proceeds) if the attribute unexpectedly already has an asserted value.
def self.set_title_attribute(cmp_idh,component_title,title_attr_name=nil)
  title_attr_name ||= 'name'
  # (removed unused local `ref = title_attr_name` from the original)
  sp_hash = {
    :cols => [:id,:value_asserted],
    :filter => [:and,[:eq,:display_name,title_attr_name],
                [:eq,:component_component_id,cmp_idh.get_id()]]
  }
  unless title_attr = get_obj(cmp_idh.createMH(:attribute),sp_hash)
    Log.error("Unexpected that cannot find the title attribute")
    return
  end
  if title_attr[:value_asserted]
    Log.error("Unexpected that title attribute has value_asserted when set_title_attribute called")
  end
  title_attr.update(:value_asserted=>component_title,:cannot_change=>true,:is_instance_value=>true)
end
-
-
1
def add_title_field?()
-
self.class.add_title_fields?([self])
-
self
-
end
-
1
def self.add_title_fields?(cmps)
-
ret = cmps
-
# TODO: for efficiency can look at ref it exsits and see if it indicates a title
-
cmps_needing_titles = cmps.select do |cmp|
-
cmp[:title].nil? and cmp.get_field?(:only_one_per_node) == false
-
end
-
return ret if cmps_needing_titles.empty?
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:component_component_id,:title],
-
:filter => [:oneof, :component_component_id, cmps_needing_titles.map{|cmp|cmp[:id]}]
-
}
-
ndx_attrs = Hash.new
-
get_objs(cmps.first.model_handle(:attribute),sp_hash).each do |a|
-
if title = a[:title]
-
ndx_attrs[a[:component_component_id]] = title
-
end
-
end
-
cmps_needing_titles.each do |cmp|
-
if title = ndx_attrs[cmp[:id]]
-
cmp[:title] = title
-
end
-
end
-
ret
-
end
-
-
1
def self.add_action_defs!(cmp_instances,opts={})
-
# add action defs that are from the template it is linked to
-
ndx_template_idhs = Hash.new
-
ndx_template_id_to_instances = Hash.new
-
cmp_instances.each do |cmp_instance|
-
template_id = cmp_instance.get_field?(:ancestor_id)
-
ndx_template_idhs[template_id] ||= cmp_instance.id_handle(:id => template_id)
-
(ndx_template_id_to_instances[template_id] ||= Array.new) << cmp_instance
-
end
-
-
ActionDef.get_ndx_action_defs(ndx_template_idhs.values,opts).each_pair do |template_id,action_defs|
-
ndx_template_id_to_instances[template_id].each do |cmp_instance|
-
cmp_instance[:action_defs] = action_defs
-
end
-
end
-
end
-
-
# these are port links that are connected on either end to the components in component_idhs
-
1
def self.get_port_links(component_idhs)
-
ret = Array.new
-
return ret if component_idhs.empty?
-
sp_hash = {
-
:cols => [:id],
-
:filter => [:oneof, :component_id, component_idhs.map{|idh|idh.get_id()}]
-
}
-
port_mh = component_idhs.first.createMH(:port)
-
port_ids = Model.get_objs(port_mh,sp_hash).map{|r|r[:id]}
-
return ret if port_ids.empty?
-
-
sp_hash = {
-
:cols => PortLink.common_columns(),
-
:filter => [:or, [:oneof, :input_id, port_ids], [:oneof, :output_id, port_ids]]
-
}
-
port_link_mh = component_idhs.first.createMH(:port_link)
-
Model.get_objs(port_link_mh,sp_hash)
-
end
-
-
1
def get_component_template_parent()
-
unless row = get_obj(:cols => [:instance_component_template_parent])
-
raise Error.new("Unexpected that get_component_template_parent() called and nil result")
-
end
-
Component::Template.create_from_component(row[:component_template])
-
end
-
-
1
def self.get_ndx_intra_node_rels(cmp_idhs)
-
cmps_with_deps = Component::Instance.get_components_with_dependency_info(cmp_idhs)
-
ComponentOrder.get_ndx_cmp_type_and_derived_order(cmps_with_deps)
-
end
-
-
# TODO: may be able to deprecate below seeing that dependencies are on instances
-
1
def self.get_components_with_dependency_info(cmp_idhs)
-
ret = Array.new
-
return ret if cmp_idhs.empty?
-
sp_hash = {
-
:cols => [:id,:inherited_dependencies, :extended_base, :component_type],
-
:filter => [:oneof, :id, cmp_idhs.map{|idh|idh.get_id()}]
-
}
-
cmp_mh = cmp_idhs.first.createMH()
-
Model.get_objs(cmp_mh,sp_hash)
-
end
-
-
1
def print_form()
-
self.class.print_form(self)
-
end
-
-
1
# Render a component's "a__b" style display name as "a::b".
# namespace is intentionally unused: it was removed from the list-components
# output (task DTK-1603); formerly "#{namespace[:display_name]}/#{name}".
def self.print_form(component, namespace=nil)
  component.get_field?(:display_name).gsub(/__/,"::")
end
-
-
1
def self.legal_display_name?(display_name)
-
!ComponentTitle.parse_component_display_name(display_name).nil?
-
end
-
-
1
def self.version_print_form(component)
-
ModuleBranch.version_from_version_field(component.get_field?(:version))
-
end
-
end
-
end; end
-
=begin
-
There might be a number of ways to encode this, such as actually adding to the schema; one direction being looked at is having effectively foreign
-
keys where for example the linux user can point to a linux user table.
-
In the approach below there will be a numeric key generated which is a handle on the object; sometimes an attribute may be the key, but not sure it always is
-
=end
-
2
module DTK; class Component
-
1
class Instance
-
1
class Interpreted < self
-
# idempotent; if reasserted twice with the same values it does not change
-
# also if asserted with fewer keys, it deletes the omitted ones
-
1
def self.create_or_update?(node,component_type,attr_hash)
-
component_type = component_type.to_s
-
raise ErrorUsage.new("Not able to find 'key_name' in provided data, 'key_name' is required field") unless attr_hash[:key_name]
-
internal_hash = HashForm.convert_to_internal(attr_hash[:key_name], component_type,node.id, attr_hash)
-
update_from_hash_assignments(node.id_handle(),internal_hash)
-
get_component(node,attr_hash[:key_name],component_type).id_handle()
-
end
-
-
1
def self.delete(node, component_type, attr_hash)
-
raise ErrorUsage.new("Not able to find 'key_name' in provided data, 'key_name' is required field") unless attr_hash[:key_name]
-
cmp = get_component(node,attr_hash[:key_name], component_type.to_s)
-
-
Model.delete_instance(cmp.id_handle()) if cmp
-
end
-
-
1
def self.get_attribute_hash(node, component_id)
-
sp_hash = {
-
:cols => [:id,:display_name,:group_id,:value_asserted],
-
:filter => [:and, [:eq,:component_component_id, component_id], [:neq,:display_name, 'key_content']]
-
}
-
get_objs(node.model_handle(:attribute),sp_hash).inject(AttributeHash.new) do |h,r|
-
h.merge(r[:display_name] => r[:value_asserted])
-
end
-
end
-
-
1
# Select the nodes on which the ssh-access agent_action still needs to run:
# for :grant_access, nodes that do NOT already have the (system_user,
# pub_name) key pair; for :revoke_access, nodes that DO have it.
def self.find_candidates(assembly, system_user, pub_name, agent_action, target_nodes = [])
-
results = list_ssh_access(assembly)
-
-
nodes = target_nodes.empty? ? assembly.get_nodes(:id,:display_name,:external_ref) : target_nodes
-
-
#
-
# if :grant_access then rejected_bool ==> false (keep if not matched)
-
# if :revoke_access then rejected_bool ==> true (keep only if matched)
-
#
-
rejected_bool = (agent_action.to_sym == :revoke_access)
-
-
nodes.reject! do |node|
-
is_rejected = rejected_bool
-
results.each do |r|
-
if node[:display_name] == r[:node_name]
-
if r[:attributes]["linux_user"].eql?(system_user) && r[:attributes]["key_name"].eql?(pub_name)
-
# a matching key flips the default reject decision
is_rejected = !rejected_bool
-
end
-
end
-
end
-
is_rejected
-
end
-
-
nodes
-
end
-
-
-
-
1
def self.list_ssh_access(assembly, component_type = :authorized_ssh_public_key)
-
nodes = assembly.get_nodes()
-
-
result_array = []
-
-
nodes.each do |node|
-
sp_hash = {
-
:cols => [:id, :display_name],
-
:filter => [:and,[:eq,:node_node_id,node.id],[:eq, :component_type, component_type.to_s]]
-
}
-
-
components = get_objs(assembly.model_handle(:component_instance),sp_hash)
-
-
components.each do |cmp|
-
result_array << { :node_name => node.display_name, :attributes => get_attribute_hash(node, cmp.id) }
-
end
-
end
-
-
result_array
-
end
-
-
1
private
-
# TODO: probably better if this returns a Component::Instance:Interpreted object
-
1
def self.get_component(node,component_name,component_type)
-
sp_hash = {
-
:cols => [:id,:display_name,:group_id],
-
:filter => [:and,[:eq,:display_name,component_name],[:eq,:node_node_id,node.id()],[:eq, :component_type, component_type.to_s]]
-
}
-
get_obj(node.model_handle(:component_instance),sp_hash)
-
end
-
-
1
class AttributeHash < Hash
-
end
-
1
class HashForm
-
1
# Build the internal nested-hash form consumed by update_from_hash_assignments:
# a single component entry keyed "type-name" carrying its attribute rows.
def self.convert_to_internal(component_name, component_type, node_id,attr_hash)
  component_key = "#{component_type}-#{component_name}"
  component_body = {
    :display_name => component_name,
    :component_type => component_type,
    :type => 'action_effects', #TODO: might make this instead 'interpreted'
    :attribute => attributes(attr_hash, node_id)
  }
  { :component => { component_key => component_body } }
end
-
1
private
-
1
# Convert a user-supplied attribute hash into DB-update rows, marked as the
# complete attribute set (so attributes omitted here are treated as removed
# upstream). node_id is currently unused but kept for interface stability.
def self.attributes(input_attr_hash, node_id)
  input_attr_hash.inject(DBUpdateHash.new().mark_as_complete()) do |acc,(attr_name,attr_value)|
    row = {
      :display_name => attr_name.to_s,
      :value_asserted => attr_value.to_s,
      :data_type => 'string',
      :semantic_data_type => 'string'
    }
    acc.merge(attr_name => row)
  end
end
-
end
-
end
-
end
-
end; end
-
# TODO: temp until move into meta directory
-
1
module DTK
-
1
module ComponentMetaClassMixin
-
1
def up()
-
1
ds_column_defs :ds_attributes, :ds_key
-
1
external_ref_column_defs()
-
1
virtual_column :name, :type => :varchar, :local_dependencies => [:display_name]
-
1
virtual_column :config_agent_type, :type => :string, :local_dependencies => [:external_ref]
-
-
# columns related to name/labels
-
# specfic labels of components and its attributes
-
1
column :keys, :json #only used if only_one_per_node is false; array of keys for displaying component name
-
1
column :i18n_labels, :json, :ret_keys_as_symbols => false
-
-
# columns related to version
-
# TODO: think we want to deprecate these; versioning is at module level
-
1
column :version, :varchar, :size => 100 #non-normalized: comes from module_branch
-
1
column :updated, :boolean, :default => false
-
-
# columns related to type
-
1
column :type, :varchar, :size => 15, :default => "template" # instance | composite | template
-
# top level in component type hierarchy
-
1
column :basic_type, :varchar, :size => 25 #service, application, language, application, extension, database, user
-
# leaf type in component type
-
1
column :specific_type, :varchar, :size => 30
-
1
column :component_type, :varchar, :size => 50 #this is the exact component type; two instances taht share this can differ by things like defaults
-
-
1
column :locked_sha, :varchar, :size => 50
-
-
# if set to true only one instance of a component (using component_type to determine 'same') can be on a node
-
1
column :only_one_per_node, :boolean, :default => true
-
# reference used when multiple instances of same component type
-
# TODO: make sure that this is preserved under clone; case to watch out for is when cloning for example more dbs in something with dbs
-
1
virtual_column :multiple_instance_ref, :type => :integer ,:local_dependencies => [:ref_num]
-
1
foreign_key :ng_component_id, :component, FK_SET_NULL_OPT #set when created by cloning from component node group
-
-
# used when this component is an extension
-
1
column :extended_base, :varchar, :size => 30
-
1
virtual_column :extended_base_id, :type => ID_TYPES[:id] ,:local_dependencies => [:extended_base,:implementation_id]
-
1
virtual_column :instance_extended_base_id, :type => ID_TYPES[:id] ,:local_dependencies => [:extended_base,:implementation_id,:node_node_id]
-
1
column :extension_type, :varchar, :size => 30
-
-
-
1
column :from_on_create_event, :boolean, :default => false
-
-
1
column :uri, :varchar
-
1
column :ui, :json
-
#:assembly_id (in contrast to parent field :component_id) is for tying the component to a composite component which is not a container
-
1
foreign_key :assembly_id, :component, FK_SET_NULL_OPT
-
1
column :view_def_ref, :varchar
-
-
# TODO: change if multiple implementations per component
-
1
foreign_key :implementation_id, :implementation, FK_SET_NULL_OPT
-
1
foreign_key :module_branch_id, :module_branch, FK_CASCADE_OPT #treated as containment
-
-
# TODO: think this can be deprecated
-
1
column :link_defs, :json
-
# deprecate below for above
-
# TODO: for efficiency materialize and if so have two variants of :component_parent for attribute; one for input, which brings in :connectivity_profile and other for output which deos not
-
1
virtual_column :link_defs_external, :type => :json, :local_dependencies => [:link_defs,:component_type,:specific_type,:basic_type]
-
1
virtual_column :connectivity_profile_internal, :type => :json, :local_dependencies => [:link_defs,:component_type,:specific_type,:basic_type]
-
1
virtual_column :most_specific_type, :type => :varchar, :local_dependencies => [:specific_type,:basic_type]
-
-
1
many_to_one :component, :library, :node, :datacenter, :project
-
1
one_to_many :component, :attribute_link, :attribute, :port_link, :monitoring_item, :dependency, :component_order, :layout, :file_asset, :link_def, :service_add_on, :component_include_module, :task_template, :node_bindings, :service_setting, :action_def, :module_ref_lock
-
1
one_to_many_clone_omit :action_def, :service_setting, :service_add_on, :layout, :module_ref_lock
-
-
1
virtual_column :project_id, :type => ID_TYPES[:id], :local_dependencies => [:project_project_id]
-
1
virtual_column :node_id, :type => ID_TYPES[:id], :local_dependencies => [:node_node_id]
-
1
virtual_column :library_id, :type => ID_TYPES[:id], :local_dependencies => [:library_library_id]
-
1
virtual_column :parent_name, :possible_parents => [:component,:library,:node,:project]
-
-
1
virtual_column :view_def_key, :type => :varchar, :hidden => true, :local_dependencies => [:id,:view_def_ref,:component_type]
-
-
1
virtual_column :namespace_info, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{ :model_name => :module_branch,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:module_branch_id)},
-
:cols => [:id,:group_id,:component_id]
-
},
-
{ :model_name => :component_module,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:module_branch,:component_id)},
-
:cols => [:id,:group_id,:display_name,:namespace_id]
-
},
-
{ :model_name => :namespace,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component_module,:namespace_id)},
-
:cols => [:id,:group_id,:display_name]
-
}]
-
-
###### virtual columns related to attributes
-
1
attributes_def = {
-
:model_name => :attribute,
-
:join_type => :left_outer,
-
:convert => true,
-
:join_cond=>{:component_component_id => q(:component,:id)} #TODO: want to use p(:component,:attribute) on left hand side
-
}
-
-
1
virtual_column :attributes, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[attributes_def.merge(
-
:cols => [:id,:display_name,:hidden,:description,id(:component),:attribute_value,:semantic_type,:semantic_type_summary,:data_type,:required,:dynamic,:cannot_change]
-
)]
-
1
virtual_column :attribute_values, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[attributes_def.merge(
-
:cols => [:id,:group_id,:display_name,:attribute_value]
-
)]
-
-
1
virtual_column :attributes_view_def_info, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[attributes_def.merge(
-
:filter => [:eq, :hidden, false],
-
:cols => [:id,:display_name,:view_def_key,id(:component),:semantic_type,:semantic_type_summary,:data_type,:required,:dynamic,:cannot_change]
-
)]
-
-
-
1
virtual_column :dynamic_attributes, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{ :model_name => :attribute,
-
:convert => true,
-
:join_type => :inner,
-
:filter => [:eq,:dynamic, true],
-
:join_cond=>{:component_component_id => q(:component,:id)} ,
-
:cols => [:id,:group_id,:display_name]
-
}]
-
-
# this wil match if the component has an attribute that uses the default field
-
1
virtual_column :attribute_default_title_field, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{ :model_name => :attribute,
-
:convert => true,
-
:join_type => :left_outer,
-
:filter => [:eq,:display_name,Attribute.default_title_field()],
-
:join_cond=>{:component_component_id => q(:component,:id)} ,
-
:cols => [:id,:group_id,:display_name]
-
}]
-
-
1
virtual_column :link_def_links, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{ :model_name => :link_def,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => LinkDef.common_columns()
-
},
-
{ :model_name => :link_def_link,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:link_def_id => q(:link_def,:id)},
-
:cols => LinkDef::Link.common_columns()
-
}]
-
-
###### end of virtual columns related to attributes, ports, and link_defs
-
-
-
1
virtual_column :library, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :library,
-
:join_type => :inner,
-
:join_cond=>{:id => :component__library_library_id},
-
:cols => [:id,:display_name]
-
}]
-
-
1
virtual_column :node, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :node,
-
:convert=>true,
-
:join_type => :inner,
-
:join_cond=>{:id => :component__node_node_id},
-
:cols => [:id,:display_name, :group_id]
-
}]
-
-
1
virtual_column :node_for_state_change_info, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :node,
-
:convert=>true,
-
:join_type => :inner,
-
:join_cond=>{:id => :component__node_node_id},
-
:filter => [:neq,:type,'assembly_wide'],
-
:cols => [:id,:display_name, :type, :external_ref, :ordered_component_ids, :agent_git_commit_id]
-
}]
-
-
1
virtual_column :implementation, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :implementation,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:implementation_id)},
-
:cols => Implementation.common_columns()
-
}]
-
1
virtual_column :implementation_file_paths, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :implementation,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:implementation_id)},
-
:cols => [:id,:display_name,:type]
-
},
-
{
-
:model_name => :file_asset,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:implementation_implementation_id => q(:implementation,:id)},
-
:cols => [:id,:file_name,:type,:path]
-
}]
-
-
1
virtual_column :module_name, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :implementation,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:implementation_id)},
-
:cols => [:id,:module_name]
-
}
-
]
-
-
1
virtual_column :module_branch, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :module_branch,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:module_branch_id)},
-
:cols => [:id,:display_name,:group_id,:branch,:repo_id,:version,:current_sha,:type,:is_workspace]
-
}]
-
-
1
virtual_column :component_module, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :module_branch,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:module_branch_id)},
-
:cols => [:id,:component_id]
-
},
-
{
-
:model_name => :component_module,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:module_branch,:component_id)},
-
:cols => [:id,:group_id,:display_name,:dsl_parsed]
-
}
-
]
-
1
virtual_column :instance_component_template_parent, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:alias => :component_template,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:ancestor_id)},
-
:cols => [:id,:group_id,:display_name,:component_type,:implementation_id]
-
}]
-
-
1
virtual_column :dependencies, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :dependency,
-
:alias => :dependencies,
-
:convert => true,
-
:join_type => :left_outer,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => [:id,:display_name,:group_id,:ref,:search_pattern,:type,:description,:severity,:ancestor_id]
-
}
-
]
-
# above is direct dependencies; below is inherited ones
-
1
virtual_column :inherited_dependencies, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:alias => :parent_component,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:ancestor_id)},
-
:cols => [:id]
-
},
-
{
-
:model_name => :dependency,
-
:alias => :dependencies,
-
:convert => true,
-
:join_type => :left_outer,
-
:join_cond=>{:component_component_id => q(:parent_component,:id)},
-
:cols => [:id,:search_pattern,:type,:description,:severity]
-
}
-
]
-
-
1
virtual_column :component_order_objs, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component_order,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => [:id,:after,:conditional,:component_component_id]
-
}
-
]
-
# above is direct dependencies; below is inherited ones
-
1
virtual_column :inherited_component_order_objs, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:alias => :parent_component,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:ancestor_id)},
-
:cols => [:id]
-
},
-
{
-
:model_name => :component_order,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:parent_component,:id)},
-
:cols => [:id,:after,:conditional,:component_component_id]
-
}
-
]
-
-
1
node_assembly_parts = {
-
:model_name => :node,
-
:join_type => :inner,
-
:join_cond=>{:assembly_id => q(:component,:id)},
-
:cols => [:id,:display_name,:assembly_id]
-
}
-
-
-
1
virtual_column :node_assembly_parts, :type => :json, :hidden => true,
-
:remote_dependencies => [node_assembly_parts]
-
-
1
virtual_column :node_assembly_parts_node_attrs, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
node_assembly_parts,
-
{
-
:model_name => :attribute,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => [:id,:display_name,:dynamic,:attribute_value]
-
}
-
]
-
1
virtual_column :node_assembly_parts_cmp_attrs, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
node_assembly_parts,
-
{
-
:model_name => :component,
-
:alias => :component_part,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => [:id]
-
},
-
{
-
:model_name => :attribute,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component_part,:id)},
-
:cols => [:id,:display_name,:dynamic,:attribute_value]
-
}
-
]
-
-
1
virtual_column :containing_node_id_info, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:alias => :parent_component,
-
:join_type => :left_outer,
-
:join_cond=>{:id => p(:component,:component)},
-
:cols => [:id,:display_name,id(:node)]
-
}
-
]
-
-
1
virtual_column :has_pending_change, :type => :boolean, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :state_change,
-
# TODO: avoiding use of :component_component
-
:sequel_def => lambda{|ds|ds.where(:state => "pending").join(:attribute__attribute,{:id => :attribute_id}).group_and_count(:attribute__component_component_id)},
-
:join_type => :left_outer,
-
:join_cond=>{:component_component_id =>:component__id}
-
},
-
{
-
:model_name => :state_change,
-
:sequel_def => lambda{|ds|ds.where(:state => "pending").group_and_count(:component_id)},
-
:join_type => :left_outer,
-
:join_cond=>{:component_id =>:component__id}
-
}
-
]
-
-
1
virtual_column :sap_dependency_database, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :attribute,
-
:convert => true,
-
:filter => [:and, [:eq, :semantic_type_summary, "sap_config__db"]],
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => [:id,:display_name,:value_asserted,:value_derived,id(:component)]
-
},
-
{
-
:model_name => :component,
-
:alias => :parent_component,
-
:join_type => :inner,
-
:join_cond=>{:id => p(:component,:component)},
-
:cols => [:id,:display_name,id(:node)]
-
},
-
{
-
:model_name => :attribute,
-
:alias => :parent_attribute,
-
:convert => true,
-
:filter => [:and, [:eq,:display_name,"sap__l4"]],
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:parent_component,:id)},
-
:cols => [:id,:display_name,:value_asserted,:value_derived,id(:component)]
-
},
-
{
-
:model_name => :node,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => :parent_component__node_node_id},
-
:cols => [:id,:display_name]
-
}
-
]
-
-
-
1
virtual_column :layouts, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :layout,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => [:id,:display_name,id(:component),:def,:type,:is_active,:description,:updated_at]
-
}]
-
-
1
virtual_column :layouts_from_ancestor, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :component,
-
:alias => :template,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:ancestor_id)},
-
:cols => [:id,:display_name]
-
},
-
{
-
:model_name => :layout,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:template,:id)},
-
:cols => [:id,:display_name,id(:component),:def,:type,:is_active,:description,:updated_at]
-
}]
-
-
1
set_submodel(:assembly)
-
end
-
end
-
end
-
# TODO: replace with common methods in attribute
-
1
# TODO: replace with common methods in attribute
module DTK
  module ComponentModelDefProcessor
    # Returns the model-def form for this component: component-level fields
    # (per ComponentMappings) plus :columns built from its unraveled attributes.
    def get_model_def(attr_filters = {:hidden => true})
      cmp_attrs_obj = get_component_with_attributes_unraveled(attr_filters)
      ModelDefProcessorInternals.convert_to_model_def_form(cmp_attrs_obj)
    end

    # Just the :columns portion of the model def.
    def get_field_def(attr_filters = {:hidden => true})
      get_model_def(attr_filters)[:columns]
    end

    def update_field_def(field_def_update)
      ModelDefProcessorInternals.update_field_def(self, field_def_update)
    end

    # TODO: cleanup uniform way of giving field def; for below just assuming hash display name
    def create_or_modify_field_def(field_def)
      ModelDefProcessorInternals.create_or_modify_field_def(self, field_def)
    end

    module ModelDefProcessorInternals
      # Creates, or updates when a matching :ref exists, an attribute child of
      # component from field_def.
      # Returns the list of idhs that have been created or modified.
      def self.create_or_modify_field_def(component, field_def)
        attr_mh = component.model_handle.create_childMH(:attribute)
        attr_hash = Aux::hash_subset(field_def, CreateFields)
        unless attr_hash[:display_name]
          raise Error.new("display_name required in field_def")
        end
        attr_hash[:ref] = attr_hash[:display_name]
        attr_hash[:semantic_data_type] ||= Attribute::SemanticDatatype.default().to_s
        attr_hash[:data_type] ||= Attribute::SemanticDatatype.datatype(attr_hash[:semantic_data_type]).to_s
        # TODO: may use a more efficient method; below returns all children rather than a filtered search
        Model.modify_children_from_rows(attr_mh, component.id_handle, [attr_hash], [:ref], :update_matching => true, :no_delete => true)
      end
      # string keys accepted from the incoming field_def mapped to attribute column names
      CreateFields = [:display_name,:data_type,:dynamic,:required,:semantic_data_type].map{|sym|{sym.to_s => sym}} + [{'default' => :value_asserted}]

      # Persists the updatable fields of field_def_update onto the attribute and
      # returns the merged field def; also updates the i18n label when supplied.
      def self.update_field_def(component, field_def_update)
        # compute default
        default_assign = AttributeComplexType.ravel_raw_post_hash({field_def_update["id"] => field_def_update["default"]}, :attribute, component[:id]).first
        attr_mh = component.model_handle.createMH(:attribute)
        attr_hash = Aux::hash_subset(field_def_update, UpdateFields - %w{default i18n}).merge(default_assign)
        Model.update_from_rows(attr_mh, [attr_hash], :partial_value => true)

        field_def = field_def_update["field_def"]
        # update label
        # TODO: could be more efficient if we knew whether the label changed
        label = field_def_update["i18n"]
        component.update_attribute_i18n_label(field_def["name"], label) if label
        field_def.merge(Aux::hash_subset(field_def_update, UpdateFields))
      end
      # BUG FIX: was %w{default description required, i18n}; the stray comma made
      # the third element "required," so 'required' updates were silently dropped
      UpdateFields = %w{default description required i18n}

      # Converts a component-with-attributes object into model-def form.
      def self.convert_to_model_def_form(cmp_attrs_obj)
        component_i18n = cmp_attrs_obj.get_component_i18n_label()
        ret = Aux::ordered_hash_subset(cmp_attrs_obj, ComponentMappings) { |v| v.kind_of?(String) ? v.to_sym : v }

        ret[:columns] = cmp_attrs_obj[:attributes].map do |attr|
          attr_i18n = cmp_attrs_obj.get_attribute_i18n_label(attr)
          seed = {:i18n => attr_i18n, :component_i18n => component_i18n}
          opts = {:include_virtual_columns => true, :seed => seed}
          Aux::ordered_hash_subset(attr, ColumnMappings, opts) do |k, v|
            convert_value_if_needed(k, v, attr)
          end
        end
        ret
      end

      private

      ComponentMappings =
        [
          {:component_type => :model_name},
          :id
        ]
      ColumnMappings =
        [
          :node_id,
          :node_name,
          :component_type,
          {:component_component_id => :component_id},
          {:display_name => :name},
          {:unraveled_attribute_id => :id},
          :description,
          {:data_type => :type},
          {:attribute_value => :default},
          :required,
          {:dynamic => :read_only},
          :cannot_change
        ]

      # Type-specific conversions for column values (:type to symbol, :default
      # coerced according to the column's data type).
      def self.convert_value_if_needed(k, v, col_info)
        case k
        when :type then v.to_sym
        when :default then type_convert_value(v, col_info[:data_type])
        else v
        end
      end

      def self.type_convert_value(v, type)
        return nil if v.nil?
        case type && type.to_sym
        when :integer then v.to_i
        else v
        end
      end
    end
  end
end
-
1
module DTK
  class Component
    class ResourceMatching
      # Input is a list of component refs, each augmented with the target node
      # where it is going to be staged. Returns [matches, conflicts] in terms of
      # component template ids.
      # NOTE(review): matching logic appears to be WIP -- matches/conflicts are
      # never populated beyond the empty case; ndx_cmp_ref_attrs is computed but
      # not yet consumed.
      def self.find_matches_and_conflicts(aug_cmp_refs)
        matches = Matches.new
        conflicts = Array.new
        ret = [matches, conflicts]
        return ret if aug_cmp_refs.empty?
        # information about any possibly relevant component on a target node
        cmp_with_attrs = get_matching_components_with_attributes(aug_cmp_refs)

        # to determine a match we first get attributes for any cmp_ref that may
        # be involved in a match -- ones with at least one matching
        # node/component_type pair
        pruned_cmp_refs = aug_cmp_refs.select do |cmp_ref|
          component_type = cmp_ref[:component_template][:component_type]
          target_node_id = cmp_ref[:target_node_id]
          cmp_with_attrs.find { |cmp| cmp[:component_type] == component_type and cmp[:node_node_id] == target_node_id }
        end
        return ret if pruned_cmp_refs.empty?

        # get attribute information for pruned_cmp_refs
        ndx_cmp_ref_attrs = get_ndx_component_ref_attributes(pruned_cmp_refs)

        # TODO: use ndx_cmp_ref_attrs plus resource-def keys to populate matches/conflicts
        [matches, conflicts]
      end

      private

      # Each aug_cmp_ref is augmented with target_node_id indicating where it is
      # to be deployed. Returns components (with their attributes collected) on
      # those target nodes sharing a component_type with the refs.
      def self.get_matching_components_with_attributes(aug_cmp_refs)
        ndx_ret = Hash.new
        target_node_ids = aug_cmp_refs.map { |cmp_ref| cmp_ref[:target_node_id] }.uniq
        cmp_types = aug_cmp_refs.map { |cmp_ref| cmp_ref[:component_template][:component_type] }.uniq
        scalar_cols = [:id,:group_id,:display_name,:node_node_id,:component_type]
        sp_hash = {
          :cols => scalar_cols + [:attribute_values],
          :filter => [:and, [:oneof, :node_node_id, target_node_ids], [:oneof, :component_type, cmp_types]]
        }
        cmp_mh = aug_cmp_refs.first.model_handle(:component)
        Model.get_objs(cmp_mh, sp_hash).each do |cmp|
          pntr = ndx_ret[cmp[:id]] ||= cmp.hash_subset(*scalar_cols).merge(:attributes => Array.new)
          pntr[:attributes] << cmp[:attribute]
        end
        ndx_ret.values
      end

      # Looks at both the component template attribute values plus the overrides.
      # Returns attributes indexed by component ref id; assumes each component
      # ref has component_template_id set.
      def self.get_ndx_component_ref_attributes(cmp_refs)
        return Hash.new if cmp_refs.empty?

        # get template attribute values
        sp_hash = {
          :cols => [:id,:group_id,:display_name,:attribute_value,:component_component_id],
          :filter => [:oneof, :component_component_id, cmp_refs.map { |r| r[:component_template_id] }]
        }
        attr_mh = cmp_refs.first.model_handle(:attribute)
        ndx_template_to_ref = cmp_refs.inject(Hash.new) { |h, cmp_ref| h.merge(cmp_ref[:component_template_id] => cmp_ref[:id]) }

        ndx_attrs = Model.get_objs(attr_mh, sp_hash).inject(Hash.new) do |h, attr|
          cmp_ref_id = ndx_template_to_ref[attr[:component_component_id]]
          h.merge(attr[:id] => attr.merge(:component_ref_id => cmp_ref_id))
        end

        # apply override attribute values
        sp_hash = {
          :cols => [:id,:group_id,:display_name,:attribute_value,:attribute_template_id],
          :filter => [:oneof, :component_ref_id, cmp_refs.map { |r| r[:id] }]
        }
        override_attr_mh = attr_mh.createMH(:attribute_override)
        # BUG FIX: the block was passed directly to Model.get_objs, where it was
        # never invoked (cf. the .each usage above), so overrides were ignored
        Model.get_objs(override_attr_mh, sp_hash).each do |ovr_attr|
          attr = ndx_attrs[ovr_attr[:attribute_template_id]]
          attr[:attribute_value] = ovr_attr[:attribute_value] if ovr_attr[:attribute_value]
        end

        ret = cmp_refs.inject(Hash.new) { |h, cmp_ref| h.merge(cmp_ref[:id] => Array.new) }
        ndx_attrs.each_value { |attr| ret[attr[:component_ref_id]] << attr }
        ret
      end

      public

      # Array of match rows keyed by :id (component template ids).
      class Matches < Array
        def ids()
          map { |el| el[:id] }
        end
      end
    end
  end
end
-
=begin
-
{2147512276=>
-
[{:component_template_id=>2147507564,
-
:display_name=>"test_nginx__real_server_stub",
-
:node_node_id=>2147507829,
-
:component_template=>
-
{:display_name=>"test_nginx__real_server_stub",
-
:only_one_per_node=>true,
-
:component_type=>"test_nginx__real_server_stub",
-
:id=>2147507564,
-
:group_id=>2147483650},
-
:node=>
-
{:display_name=>"real_server",
-
:assembly_id=>2147507818,
-
:id=>2147507829,
-
:group_id=>2147483650},
-
:id=>2147507830}],
-
=end
-
-
-
2
module DTK; class Component
-
1
class Template < self
-
1
# Fetches objects; when the handle is for :component_template, queries as
# :component and converts each returned row into a component_template object.
def self.get_objs(mh, sp_hash, opts = {})
  return super unless mh[:model_name] == :component_template
  cmp_mh = mh.merge(:model_name => :component)
  super(cmp_mh, sp_hash, opts).map { |cmp| create_from_component(cmp) }
end

# Wraps a component row as a component_template object; nil-safe.
def self.create_from_component(cmp)
  return nil unless cmp
  cmp.id_handle().create_object(:model_name => :component_template).merge(cmp)
end
-
-
1
# Fetches the info needed to clone the given component templates, including
# each template's current repo sha.
def self.get_info_for_clone(cmp_template_idhs)
  return [] if cmp_template_idhs.empty?
  sp_hash = {
    :cols => [:id,:group_id,:display_name,:project_project_id,:component_type,:version,:module_branch],
    :filter => [:oneof, :id, cmp_template_idhs.map { |idh| idh.get_id() }]
  }
  mh = cmp_template_idhs.first.createMH(:component_template)
  templates = get_objs(mh, sp_hash)
  # each returns its receiver, so this also serves as the return value
  templates.each { |t| t.get_current_sha!() }
end

# Merges clone info for this template into self.
def update_with_clone_info!()
  merge!(self.class.get_info_for_clone([id_handle()]).first)
end

# Returns the current sha of the associated module branch, fetching it from the
# repo (and caching on the branch object) when not already set.
def get_current_sha!()
  module_branch = self[:module_branch]
  unless module_branch
    Log.error("Unexpected that get_current_sha called on object when self[:module_branch] not set")
    return nil
  end
  module_branch[:current_sha] || module_branch.update_current_sha_from_repo!()
end
-
-
1
def get_component_module()
  get_obj_helper(:component_module)
end

# Returns the title attribute's display name when this component takes a title;
# nil otherwise.
def get_title_attribute_name?()
  title_rows = self.class.get_title_attributes([id_handle])
  title_rows.empty? ? nil : title_rows.first[:display_name]
end
-
-
# for any member of cmp_tmpl_idhs that is a non-singleton, it returns the title attribute
-
1
# For any member of cmp_tmpl_idhs that is a non-singleton (not
# only_one_per_node), returns its title attribute.
def self.get_title_attributes(cmp_tmpl_idhs)
  return [] if cmp_tmpl_idhs.empty?
  # first pass: non-singletons that carry the default title attribute
  sp_hash = {
    :cols => [:attribute_default_title_field],
    :filter => [:and, [:eq, :only_one_per_node, false],
                [:oneof, :id, cmp_tmpl_idhs.map { |idh| idh.get_id() }]]
  }
  rows = get_objs(cmp_tmpl_idhs.first.createMH(), sp_hash)
  return [] if rows.empty?

  # rows has one element per non-singleton member of cmp_tmpl_idhs; the
  # :attribute key is nil when the component does not use the default key --
  # those need the more expensive Attribute.get_title_attributes path
  ret = rows.map { |r| r[:attribute] }.compact
  needs_lookup_idhs = rows.select { |r| r[:attribute].nil? }.map { |r| r.id_handle() }
  unless needs_lookup_idhs.empty?
    attr_cols = [:id,:group_id,:display_name,:external_ref,:component_component_id]
    title_attrs = get_attributes(needs_lookup_idhs, :cols => attr_cols).select { |a| a.is_title_attribute?() }
    ret += title_attrs unless title_attrs.empty?
  end
  ret
end
-
-
1
# Hash wrapper exposing the component-matching fields as reader methods.
class MatchElement < Hash
  def initialize(hash)
    super()
    replace(hash)
  end

  # zero-arg readers over the underlying hash keys
  [:component_type, :version_field, :version, :namespace].each do |key|
    define_method(key) { self[key] }
  end
end
-
1
# Finds, within the project, the component template matching each element of
# match_element_array (on component_type, version, and -- when given --
# namespace). Raises AmbiguousModuleRef when an element matches in multiple
# namespaces, and DanglingComponentRefs / RemovedServiceInstanceCmpRef when
# elements match nothing.
# Cleanup: removed a duplicate `ret = Array.new` and an unused `module_name` local.
def self.get_matching_elements(project_idh, match_element_array, opts = {})
  cmp_types = match_element_array.map { |el| el.component_type }.uniq
  versions = match_element_array.map { |el| el.version_field }
  sp_hash = {
    :cols => [:id,:group_id,:component_type,:version,:implementation_id,:external_ref],
    :filter => [:and,
                [:eq, :project_project_id, project_idh.get_id()],
                [:oneof, :version, versions],
                [:eq, :assembly_id, nil],
                [:eq, :node_node_id, nil],
                [:oneof, :component_type, cmp_types]]
  }
  component_rows = get_objs(project_idh.createMH(:component), sp_hash)
  augment_with_namespace!(component_rows)
  ret = Array.new
  unmatched = Array.new
  match_element_array.each do |el|
    matches = component_rows.select do |r|
      el.version_field == r[:version] and
        el.component_type == r[:component_type] and
        (el.namespace.nil? or el.namespace == r[:namespace])
    end
    if matches.empty?
      unmatched << el
    elsif matches.size == 1
      ret << matches.first
    else
      # TODO: may put in logic that sees if one is service modules ns and uses that one when multiple matches
      error_params = {
        :module_type => 'component',
        :module_name => Component.module_name(el.component_type),
        :namespaces => matches.map { |m| m[:namespace] }.compact # compact just to be safe
      }
      raise ServiceModule::ParsingError::AmbiguousModuleRef.new(error_params)
    end
  end
  unless unmatched.empty?()
    # TODO: indicate whether there is a nailed namespace that does not exist or no matches at all
    cmp_refs = unmatched.map do |match_el|
      cmp_type = match_el.component_type
      if ns = match_el.namespace
        cmp_type = "#{ns}:#{cmp_type}"
      end
      {
        :component_type => cmp_type,
        :version => match_el.version
      }
    end
    if opts[:service_instance_module]
      raise ServiceModule::ParsingError::RemovedServiceInstanceCmpRef.new(cmp_refs, opts)
    else
      raise ServiceModule::ParsingError::DanglingComponentRefs.new(cmp_refs, opts)
    end
  end
  ret
end
-
-
1
# Lists the project's component templates (scoped to an assembly's version when
# opts[:assembly_instance] is given), skipping templates whose module-branch
# type equals opts[:ignore], applying any :component_version_constraints, and
# returning rows in print form sorted by display name.
# Fix: corrected the misspelled local `ingore_type` and removed a dead
# commented-out line.
def self.list(mh, opts = Opts.new)
  project_idh = opts[:project_idh]
  assembly = opts[:assembly_instance]
  sp_hash = {
    :cols => [:id, :type, :display_name, :description, :component_type, :version, :refnum, :module_branch_id],
    :filter => [:and, [:eq, :type, "template"],
                [:oneof, :version, filter_on_versions(:assembly => assembly)],
                [:eq, :project_project_id, project_idh.get_id()]]
  }
  cmps = get_objs(project_idh.createMH(:component), sp_hash, :keep_ref_cols => true)

  ignore_type = opts[:ignore]
  ret = []
  # TODO(review): N+1 query -- the module branches could be fetched in one
  # :oneof query keyed by module_branch_id
  cmps.each do |r|
    sp_h = {
      :cols => [:id, :type, :display_name, :component_module_namespace_info],
      :filter => [:eq, :id, r[:module_branch_id]]
    }
    m_branch = Model.get_obj(project_idh.createMH(:module_branch), sp_h)
    if m_branch && !m_branch[:type].eql?(ignore_type)
      r[:namespace] = m_branch[:namespace][:display_name]
      ret << r
    end
  end

  if constraint = opts[:component_version_constraints]
    ret = ret.select { |r| constraint.meets_constraint?(r) }
  end
  ret.each { |r| r.convert_to_print_form!() }
  ret.sort { |a, b| a[:display_name] <=> b[:display_name] }
end
-
-
1
# Validates id against a single version or, when given an array, against each
# version in turn; raises ErrorIdInvalid when no version matches.
def self.check_valid_id(model_handle, id, version_or_versions = nil)
  unless version_or_versions.kind_of?(Array)
    return check_valid_id_aux(model_handle, id, version_or_versions)
  end
  version_or_versions.each do |version|
    found = check_valid_id_aux(model_handle, id, version, :no_error_if_no_match => true)
    return found if found
  end
  raise ErrorIdInvalid.new(id, pp_object_type())
end

def self.check_valid_id_aux(model_handle, id, version, opts = {})
  filter = [:and,
            [:eq, :id, id],
            [:eq, :type, "template"],
            [:eq, :node_node_id, nil],
            [:neq, :project_project_id, nil],
            [:eq, :version, version_field(version)]]
  check_valid_id_helper(model_handle, id, filter, opts)
end

# Resolves a template name to an id; if a title is in the name, it is stripped.
# With an array of versions, tries each in order and raises
# ErrorNameDoesNotExist when none match.
def self.name_to_id(model_handle, name, version_or_versions = nil)
  unless version_or_versions.kind_of?(Array)
    return name_to_id_aux(model_handle, name, version_or_versions)
  end
  version_or_versions.each do |version|
    found = name_to_id_aux(model_handle, name, version, :no_error_if_no_match => true)
    return found if found
  end
  raise ErrorNameDoesNotExist.new(name, pp_object_type())
end
-
-
# This method returns a component augmented with keys having objects
-
# :module_branch
-
# :component_module
-
# :namespace
-
1
# Returns the single component template matching cmp_name (optionally filtered
# by namespace), augmented with :module_branch, :component_module, and
# :namespace objects. Prefers a service-instance-specific template over the
# base template. Raises ErrorUsage on ambiguous namespaces or when the match
# conflicts with a component module already used by the service instance.
# Returns nil when there is no match.
# Cleanup: removed unused locals (match_cmps, cmp_module_ids) and renamed a
# block parameter that shadowed its enclosing variable.
def self.get_augmented_component_template(cmp_mh, cmp_name, namespace, assembly)
  display_name = display_name_from_user_friendly_name(cmp_name)
  component_type, _title = ComponentTitle.parse_component_display_name(display_name)
  sp_hash = {
    :cols => [:id, :group_id, :display_name, :module_branch_id, :type, :ref, :augmented_with_module_info,:version],
    :filter => [:and,
                [:eq, :type, 'template'],
                [:eq, :component_type, component_type],
                [:neq, :project_project_id, nil],
                [:oneof, :version, filter_on_versions(:assembly => assembly)],
                [:eq, :node_node_id, nil]]
  }
  cmp_templates = get_objs(cmp_mh.createMH(:component_template), sp_hash, :keep_ref_cols => true)
  if namespace
    # filter component templates by namespace
    cmp_templates.select! { |cmp| cmp[:namespace][:display_name] == namespace }
  end
  return nil if cmp_templates.empty?

  # there could be two matches, one from the base template and one from a
  # service-instance-specific template; in this case use the service-specific one
  assembly_version = assembly_version(assembly)
  if cmp_templates.find { |cmp| cmp[:version] == assembly_version }
    cmp_templates.select! { |cmp| cmp[:version] == assembly_version }
  end
  unless cmp_templates.size == 1
    possible_names = cmp_templates.map { |r| r.display_name_print_form(:namespace_prefix => true) }.join(',')
    raise ErrorUsage.new("Multiple components with different namespaces match; pick one from: #{possible_names}")
  end
  ret_cmp = cmp_templates.first

  # if a component module with the same name but a different namespace already
  # exists in the service instance, the user must use that module's template
  opts = Opts.new(:with_namespace => true)
  assembly_cmp_mods = assembly.list_component_modules(opts) # component_modules already associated with service instance
  ret_cmp_mod = ret_cmp[:component_module][:display_name]
  if existing_mod = assembly_cmp_mods.find { |cmp_mod| cmp_mod[:display_name] == ret_cmp_mod }
    ret_cmp_ns = ret_cmp[:namespace][:display_name]
    cmp_mod_ns = existing_mod[:namespace_name]
    if ret_cmp_ns != cmp_mod_ns
      raise ErrorUsage.new("Unable to add component from (#{ret_cmp_ns}:#{ret_cmp_mod}) because you are already using components from following component modules: #{cmp_mod_ns}:#{existing_mod[:display_name]}")
    end
  end
  ret_cmp
end
-
-
1
private

def self.assembly_version(assembly)
  ModuleVersion.ret(assembly)
end

# Versions to match against: always 'master', plus the assembly-specific
# version when an :assembly is supplied.
def self.filter_on_versions(opts)
  assembly = opts[:assembly]
  versions = ['master']
  versions << assembly_version(assembly) if assembly
  versions
end

# Resolves name to an id for one specific version; if a title is in the name,
# it is stripped.
def self.name_to_id_aux(model_handle, name, version, opts = {})
  display_name = display_name_from_user_friendly_name(name)
  component_type, _title = ComponentTitle.parse_component_display_name(display_name)
  sp_hash = {
    :cols => [:id],
    :filter => [:and,
                [:eq, :type, 'template'],
                [:eq, :component_type, component_type],
                [:neq, :project_project_id, nil],
                [:eq, :node_node_id, nil],
                [:eq, :version, version_field(version)]]
  }
  name_to_id_helper(model_handle, Component.name_with_version(name, version), sp_hash, opts)
end

# Destructively adds :namespace (the namespace display name) to each component
# template row that has one; returns the (mutated) input array.
def self.augment_with_namespace!(component_templates)
  return [] if component_templates.empty?
  sp_hash = {
    :cols => [:id,:namespace_info],
    :filter => [:oneof, :id, component_templates.map { |r| r.id() }]
  }
  mh = component_templates.first.model_handle()
  ndx_namespaces = Hash.new
  get_objs(mh, sp_hash).each do |r|
    ndx_namespaces[r[:id]] = (r[:namespace] || {})[:display_name]
  end
  component_templates.each do |r|
    ns = ndx_namespaces[r[:id]]
    r.merge!(:namespace => ns) if ns
  end
  component_templates
end
-
end
-
-
# TODO: may move to be instance method on Template
-
1
module TemplateMixin
-
1
# Updates the template attribute's default (value_asserted) and propagates the
# new value to every component instance cloned from this template that has not
# asserted its own instance value for that attribute.
def update_default(attribute_name, val, field_to_match = :display_name)
  tmpl_attr_obj = get_virtual_attribute(attribute_name, [:id,:value_asserted], field_to_match)
  raise Error.new("cannot find attribute #{attribute_name} on component template") unless tmpl_attr_obj
  update(:updated => true)
  tmpl_attr_obj.update(:value_asserted => val)
  # update any instance that points to this template which does not have an
  # instance value asserted
  # TODO: can be more efficient by doing select and update at same time
  base_sp_hash = {
    :model_name => :component,
    :filter => [:eq, :ancestor_id, id()],
    :cols => [:id]
  }
  join_array = [{
    :model_name => :attribute,
    :convert => true,
    :join_type => :inner,
    :filter => [:and, [:eq, field_to_match, attribute_name], [:eq, :is_instance_value, false]],
    :join_cond => {:component_component_id => :component__id},
    :cols => [:id,:component_component_id]
  }]
  rows = Model.get_objects_from_join_array(model_handle, base_sp_hash, join_array)
  attr_ids_to_update = rows.map { |r| r[:attribute][:id] }
  return if attr_ids_to_update.empty?
  attr_mh = createMH(:attribute)
  attribute_rows = attr_ids_to_update.map { |attr_id| {:id => attr_id, :value_asserted => val} }
  Attribute.update_and_propagate_attributes(attr_mh, attribute_rows)
end
-
-
end
-
end; end
-
-
1
module DTK
-
1
class Component
-
1
class Test < self
-
-
1
# Pairs a test component with the attribute mappings that connect it to the
# component under test.
class LinkedTest
  attr_reader :test_component, :var_mappings_hash

  def initialize(test_component, ams)
    @test_component = test_component
    @attribute_mappings = ams
    @var_mappings_hash = nil # lazily built by component_to_test_attribute_mappings
  end

  # Component-side attribute (term-index) lists, one per attribute mapping.
  def get_component_attributes()
    @attribute_mappings.map { |am| am_component_attr(am) }
  end

  # Flows the cmp_attribute values through the component-to-test attribute
  # mappings.
  def find_mapped_component_test_attributes(cmp_attrs)
    component_to_test_attribute_mappings()
  end

  private

  # Memoized map from a component attribute list to its test attribute list.
  def component_to_test_attribute_mappings()
    return @var_mappings_hash if @var_mappings_hash
    @var_mappings_hash = Hash.new
    @attribute_mappings.each do |am|
      index = am_component_attr(am)
      test_attr = am_test_attr(am)
      existing = @var_mappings_hash[index]
      if existing && existing != test_attr
        # TODO: temporary sanity check that a component attr maps to one test attr
        Log.error("Unexpected that #{index} has multiple mappings")
        next
      end
      @var_mappings_hash[index] = am_test_attr(am)
    end
    @var_mappings_hash
  end

  # 'output' is the attribute used to propagate a value to the input; for
  # LinkedTest objects output corresponds to component and input to component test.
  def am_component_attr(am)
    am.map { |a| a[:output][:term_index] }
  end

  def am_test_attr(am)
    am.map { |a| a[:input][:term_index] }
  end
end
-
-
1
# Collects the LinkedTest objects associated with one component on one node.
class LinkedTests
  attr_reader :component, :test_array, :node

  def initialize(cmp, test_array = [])
    @node = {:id => cmp[:node][:id], :display_name => cmp[:node][:display_name]}
    @component = cmp.hash_subset(:id, :display_name)
    @test_array = test_array
  end

  def add_test!(test_component, ams)
    @test_array << LinkedTest.new(test_component, ams)
  end

  # Selects the tests relevant to this component by flowing its attributes
  # through each test's attribute mappings.
  def find_relevant_linked_test_array()
    # collect the component attributes referenced by the tests' mappings
    # (currently unused downstream -- kept for the pending TODO below)
    referenced_attr_names = @test_array.flat_map { |test| test.get_component_attributes() }.uniq
    # TODO: compute the component attribute vals corresponding to referenced_attr_names
    cmp_attr_vals = nil
    find_mapped_component_test_attributes(cmp_attr_vals)
  end

  private

  def find_mapped_component_test_attributes(cmp_attr_vals)
    @test_array.select { |test| test.find_mapped_component_test_attributes(cmp_attr_vals) }
  end
end
-
-
# returns array of ComponentLinkedTests
-
1
# Returns an array of LinkedTests, one per component (in the assembly instance)
# that has dependency links pointing at test components.
def self.get_linked_tests(assembly_instance, project, filter_component = nil)
  opts = Opts.new(
    :detail_to_include => [:component_dependencies],
    :filter_component => filter_component
  )
  aug_cmps = assembly_instance.get_augmented_components(opts)
  # ndx_test_cmps is test components indexed by field component_type
  # TODO: need to factor in test namespaces
  link_def_links, ndx_test_cmps = get_link_def_links_to_tests(project, aug_cmps)
  return [] if link_def_links.empty?

  # index {test_component, attribute mappings} by link_def id
  ndx_attribute_mappings = Hash.new
  link_def_links.each do |ld_link|
    am_list = ld_link.attribute_mappings()
    entry = ndx_attribute_mappings[ld_link[:link_def_id]] ||=
      {:test_component => ld_link[:remote_component_type], :ams => Array.new}
    entry[:ams] << am_list
  end

  ndx_ret = Hash.new
  each_link(aug_cmps) do |cmp, link|
    test_info = ndx_attribute_mappings[link[:id]]
    next unless test_info
    linked_tests = ndx_ret[cmp.id] ||= LinkedTests.new(cmp)
    linked_tests.add_test!(ndx_test_cmps[test_info[:test_component]], test_info[:ams])
  end
  ndx_ret.values
end
-
-
1
private
-
# returns [link_def_links, ndx_test_cmps] (ndx is component_type)
-
1
# Returns [link_def_links, ndx_test_cmps] where ndx_test_cmps is indexed by
# component_type; both are empty when no component dependency links to a test.
def self.get_link_def_links_to_tests(project, aug_cmps)
  ret = [Array.new, Array.new]
  # find all dependencies (link defs), then keep only the link def links
  # associated with component tests
  link_defs = Array.new
  each_link(aug_cmps) { |cmp, link| link_defs << link }
  return ret if link_defs.empty?

  # get the link def links
  cols = [:id,:group_id,:display_name,:remote_component_type,:position,:content,:type,:link_def_id]
  link_def_links = LinkDef.get_link_def_links(link_defs.map { |ld| ld.id_handle() }, :cols => cols)
  # only internal links (link on same component) can point at a test
  link_def_links.reject! { |ldl| ldl[:type] != "internal" }
  return ret if link_def_links.empty?

  possible_test_cmp_types = link_def_links.map { |ldl| ldl[:remote_component_type] }.uniq
  ndx_test_cmps = get_ndx_test_components(project, possible_test_cmp_types)
  return ret if ndx_test_cmps.empty?

  # remove any link def link whose remote component is not a test
  link_def_links.reject! { |ldl| ndx_test_cmps[ldl[:remote_component_type]].nil? }
  # BUG FIX: the original re-checked ndx_test_cmps.empty? here, which is always
  # false at this point; the intent is to bail out when no links remain
  return ret if link_def_links.empty?

  [link_def_links, ndx_test_cmps]
end
-
-
1
# Returns the project's test components (indexed by component_type) whose
# external_ref type marks them as tests, each with its attributes collected.
def self.get_ndx_test_components(project, possible_test_cmp_types)
  ret = Hash.new
  sp_hash = {
    :cols => [:id,:group_id,:display_name,:attributes,:component_type,:external_ref,:module_branch_id],
    :filter => [:and,
                [:eq, :assembly_id, nil],
                [:eq, :project_project_id, project.id],
                [:oneof, :component_type, possible_test_cmp_types]]
  }
  Model.get_objs(project.model_handle(:component), sp_hash).each do |r|
    ext_type = (r[:external_ref] || {})[:type]
    next unless TestExternalRefTypes.include?(ext_type)
    cmp = ret[r[:component_type]] ||=
      r.hash_subset(:id, :group_id, :display_name, :component_type, :external_ref).merge(:attributes => Array.new)
    cmp[:attributes] << r[:attribute]
  end
  ret
end
# external_ref types that identify a component as a test
TestExternalRefTypes = ['serverspec_test']
-
-
1
# Yields (component, link_def) for each Dependency::Link among the augmented
# components' :dependencies entries.
def self.each_link(aug_cmps, &block)
  aug_cmps.each do |cmp|
    deps = cmp[:dependencies] || []
    deps.each do |dep|
      next unless dep.kind_of?(::DTK::Dependency::Link)
      block.call(cmp, dep.link_def)
    end
  end
end
-
end
-
end
-
end
-
1
module XYZ
-
1
module ComponentUserClassMixin
-
1
# Creates a per-user component template in the library by cloning the generic
# user template for the config agent and setting its virtual attributes from
# params. Raises Error when the username, library, or generic template is missing.
# Fix: removed a leftover `pp [...]` debug print.
def create_user_library_template(model_handle, params)
  username = params["username"]
  raise Error.new("missing user name") unless username
  # TODO stub to get config_agent_type
  config_agent_type = params["config_agent_type"] || "puppet"
  user_cmp_proc = UserComponentProcessor.create(config_agent_type)
  # TODO: stub to get library
  library_obj = get_objs(model_handle.createMH(:library), {:cols => [:id]}).first
  raise Error.new("cannot find library") unless library_obj
  generic_user_tmpl = user_cmp_proc.find_generic_library_template(library_obj)
  raise Error.new("cannot find user template") unless generic_user_tmpl
  override_attrs = {:specific_type => "user", :display_name => "user_#{username}"}
  opts = {:ret_new_obj_with_cols => [:id]}
  new_user_obj = library_obj.clone_into(generic_user_tmpl, override_attrs, opts)
  user_cmp_proc.set_virtual_attributes(new_user_obj, username, params)
end
-
1
private
-
1
# Base class for config-agent-specific user component processing; subclasses
# implement updates_from_params.
class UserComponentProcessor
  # Factory: returns the processor for the given config agent type.
  # Raises Error for unknown types.
  def self.create(config_agent_type)
    case config_agent_type
    when "puppet" then UserComponentProcPuppet.new(config_agent_type)
    when "chef" then UserComponentProcChef.new(config_agent_type)
    else
      # BUG FIX: the message interpolated a misspelled variable
      # (config_agent_typ), raising NameError instead of the intended Error
      raise Error.new("unknown config_agent_type #{config_agent_type}")
    end
  end

  def initialize(config_agent_type)
    @config_agent_type = config_agent_type
  end

  # Finds the library's generic user component template for this config agent.
  def find_generic_library_template(library_obj)
    sp_hash = {
      :cols => [:id,:config_agent_type],
      :filter => [:eq, :specific_type, "generic_user"]
    }
    library_obj.get_children_objs(:component, sp_hash).find do |cmp|
      cmp[:config_agent_type] == @config_agent_type
    end
  end

  # Writes the attribute values derived from username/params onto the user
  # component's attributes; unknown attribute names are logged and skipped.
  def set_virtual_attributes(user_obj, username, params)
    # TODO: can be more efficient if use update from select
    attr_ids = user_obj.get_children_objs(:attribute, :cols => [:id,:display_name]).inject({}) do |h, r|
      h.merge(r[:display_name] => r[:id])
    end
    update_rows = Array.new
    updates_from_params(username, params).each do |k, v|
      if id = attr_ids[k]
        update_rows << {:id => id, :value_asserted => v}
      else
        Log.error("virtual attribute #{k} is illegal")
      end
    end
    return if update_rows.empty?
    Model.update_from_rows(user_obj.model_handle(:attribute), update_rows)
  end
end
-
1
# Puppet-specific mapping of user params to component attribute values.
class UserComponentProcPuppet < UserComponentProcessor
  def updates_from_params(username, params)
    # required attributes
    ret = {
      "username" => username,
      "fullname" => params["fullname"] || username.capitalize,
    }
    if params["has_home_directory"] == "true"
      ret["home_dir"] = "/home/#{params["home_directory_name"]|| username}"
    end
    ret["root_access"] = params["root_access"] if params.has_key?("root_access")
    # collect (title, key) pairs; every non-empty key needs a matching title
    keys = params["ssh_key"] || []
    titles = params["ssh_key_title"] || []
    rsa_pub_keys = []
    keys.each_with_index do |k, i|
      next if k.empty?
      title = titles[i]
      raise Error.new("Need title for key in position #{i.to_s}") unless title
      rsa_pub_keys << {"username" => username, "title" => title, "key" => k}
    end
    ret["rsa_pub_keys"] = rsa_pub_keys unless rsa_pub_keys.empty?
    ret
  end
end
-
1
# Chef support is not implemented yet; updates_from_params always raises.
class UserComponentProcChef < UserComponentProcessor
  def updates_from_params(username, params)
    raise Error.new("Not implemented yet")
  end
end
-
end
-
end
-
1
module XYZ
-
1
module ComponentViewMetaProcessor
-
1
# Dispatches view-meta creation by view type; only :edit is supported so far.
def create_view_meta_from_layout_def(view_type, layout_def)
  case view_type
  when :edit
    ViewMetaProcessorInternals.create_from_layout_def__edit(layout_def)
  else
    raise Error.new("not implemented for view type #{view_type}")
  end
end
-
-
1
module ViewMetaProcessorInternals
-
1
# Builds the edit-view meta (action, hidden fields, field groups) from a
# layout def, preserving key order.
def self.create_from_layout_def__edit(layout_def)
  ret = ActiveSupport::OrderedHash.new()
  ret[:action] = ""
  ret[:hidden_fields] = hidden_fields(:edit)
  ret[:field_groups] = field_groups(layout_def)
  ret
end

# One single-column, labeled group per layout group; each field is keyed by its
# name as a symbol.
def self.field_groups(layout_def)
  groups = layout_def[:groups] || []
  groups.map do |group|
    fields = group[:fields].map { |f| {f[:name].to_sym => f} }
    {:num_cols => 1, :display_labels => true, :fields => fields}
  end
end
-
-
1
def self.hidden_fields(type)
-
HiddenFields[type].map do |hf|
-
{hf.keys.first => Aux::ordered_hash_subset(hf.values.first,[:required,:type,:value])}
-
end
-
end
-
1
HiddenFields = {
-
:list =>
-
[
-
{:id => {
-
:required => true,
-
:type => 'hidden',
-
}}
-
],
-
:edit =>
-
[
-
{
-
:id => {
-
:required => true,
-
:type => 'hidden',
-
}
-
},
-
{
-
:model => {
-
:required => true,
-
:type => 'hidden',
-
:value => 'component',
-
}
-
},
-
{
-
:action => {
-
:required => true,
-
:type => 'hidden',
-
:value => 'save_attribute'
-
},
-
}
-
],
-
:display =>
-
[
-
{
-
:id => {
-
:required => true,
-
:type => 'hidden',
-
}
-
},
-
{
-
:obj => {
-
:required => true,
-
:type => 'hidden',
-
:value => 'component',
-
}
-
},
-
{
-
:action => {
-
:required => true,
-
:type => 'hidden',
-
:value => 'edit',
-
},
-
}
-
]
-
}
-
-
end
-
end
-
end
-
1
module XYZ
  module ComponentType
    class Database < ComponentTypeHierarchy
      # Clones the db component associated with db_server_component onto
      # db_server_node. The db component is found by following the db
      # server's "db_component" attribute (its value names a component in
      # the same library). Returns an id handle for the new component, or
      # nil (after logging an error) when no db component can be found.
      def self.clone_db_onto_db_server_node(db_server_node,db_server_component)
        # anchor the query on the db server's ancestor (template) row
        base_sp_hash = {
          :model_name => :component,
          :filter => [:eq, :id, db_server_component[:ancestor_id]],
          :cols => [:id,:library_library_id]
        }
        join_array =
          [{
            # join in the "db_component" attribute to learn the db
            # component's name
            :model_name => :attribute,
            :join_type => :inner,
            :alias => :db_component_name,
            :filter => [:eq, :display_name, "db_component"],
            :join_cond => {:component_component_id => :component__id},
            :cols => [:value_asserted,:component_component_id]
          },
          {
            # then join the component with that name in the same library
            :model_name => :component,
            :alias => :db_component,
            :join_type => :inner,
            :convert => true,
            :join_cond => {:display_name => :db_component_name__value_asserted, :library_library_id => :component__library_library_id},
            :cols => [:id,:display_name,:library_library_id]
          }
        ]

        rows = Model.get_objects_from_join_array(db_server_component.model_handle,base_sp_hash,join_array)
        db_component = rows.first && rows.first[:db_component]
        unless db_component
          Log.error("Cannot find the db component associated with the db server")
          return nil
        end
        new_db_cmp_id = db_server_node.clone_into(db_component)
        db_server_component.model_handle.createIDH(:id => new_db_cmp_id)
      end
    end
  end
end
-
1
module DTK
  # Ordering constraints among components: which component must come
  # :after which, derived from component_order objects and from simple
  # component-type dependency filters.
  class ComponentOrder < Model
    # Adds any applicable component_order dependencies for component_idhs
    # into component_deps (hash: component id -> {:component_type,
    # :component_dependencies}). Mutates and returns component_deps.
    def self.update_with_applicable_dependencies!(component_deps,component_idhs)
      sample_idh = component_idhs.first
      cols_for_get_virtual_attrs_call = [:component_type,:implementation_id,:extended_base]
      # TODO: switched to use inherited component_order_objs; later will allow component_order_objs directly on component instances and have
      # them override
      # TODO: should also modify cloning so component instances do not get the component_order_objs
      sp_hash = {
        # :cols => [:id,:component_order_objs]+cols_for_get_virtual_attrs_call,
        :cols => [:id,:inherited_component_order_objs]+cols_for_get_virtual_attrs_call,
        :filter => [:oneof, :id, component_idhs.map{|idh|idh.get_id()}]
      }
      cmps_with_order_info = prune_if_not_applicable(get_objs(sample_idh.createMH,sp_hash))
      # cmps_with_order_info can have a component appear multiple times, once for each order relation
      update_with_order_info!(component_deps,cmps_with_order_info)
    end

    # assumption that this is called with components having keys :id,:dependencies, :extended_base, :component_type
    # this can be either component template or component instance with :dependencies joined in from associated template
    # TODO: change :component_dependencies to :derived_order -> must change all upstream uses of this return result too
    def self.get_ndx_cmp_type_and_derived_order(components)
      ret = Hash.new
      return ret if components.empty?
      components.each do |cmp|
        unless pntr = ret[cmp[:id]]
          pntr = ret[cmp[:id]] = {:component_type => cmp[:component_type], :component_dependencies => Array.new}
        end
        if cmp[:extended_base]
          # an extension depends on its base component
          pntr[:component_dependencies] << cmp[:extended_base]
        elsif dep_obj = cmp[:dependencies]
          # only simple component-type filters contribute to ordering
          if dep_cmp_type = dep_obj.is_simple_filter_component_type?()
            pntr[:component_dependencies] << dep_cmp_type
          end
        end
      end
      ComponentOrder.update_with_applicable_dependencies!(ret,components.map{|cmp|cmp.id_handle()}.uniq)
    end

    # assumption that this is called with components having keys :id,:dependencies, :extended_base, :component_type
    # this can be either component template or component instance with :dependencies joined in from associated template
    # Yields each component to block in dependency order.
    def self.derived_order(components,&block)
      ndx_cmps = components.inject({}){|h,cmp|h.merge(cmp[:id] => cmp)}
      cmp_deps = get_ndx_cmp_type_and_derived_order(components)
      Task::Action::OnComponent.generate_component_order(cmp_deps).each do |(component_id,deps)|
        block.call(ndx_cmps[component_id])
      end
    end

    private

    # Splits rows into unconditional ones (kept as-is) and conditional
    # ones (kept only when their condition holds; see prune).
    def self.prune_if_not_applicable(cmps_with_order_info)
      ret = Array.new
      return ret if cmps_with_order_info.empty?
      with_conditionals = Array.new
      cmps_with_order_info.each do |cmp|
        order_info = cmp[:component_order]
        if order_info[:conditional]
          with_conditionals << cmp
        else
          ret << cmp
        end
      end
      with_conditionals.empty? ? ret : (prune(with_conditionals) + ret)
    end

    # Keeps only the conditional rows whose condition matches the
    # component's current attribute value.
    # TODO: stub that just treats a very specific form
    # assuming conditional of form :":attribute_value"=>[":eq", ":attribute.<var>", <val>]
    def self.prune(cmps_with_order_info)
      attrs_to_get = Hash.new
      cmps_with_order_info.each do |cmp|
        unexpected_form = true
        cnd = cmp[:component_order][:conditional]
        if cnd.kind_of?(Hash) and cnd.keys.first.to_s == ":attribute_value"
          eq_stmt = cnd.values.first
          if eq_stmt.kind_of?(Array) and eq_stmt[0] == ":eq"
            if cnd.values.first[1] =~ /:attribute\.(.+$)/ and eq_stmt[2]
              attr_name = $1
              val = eq_stmt[2]
              unexpected_form = false
              match_cond = [:eq,:attribute_value,val]
              pntr = attrs_to_get[cmp[:id]] ||= {:component => cmp, :attr_info => Array.new}
              pntr[:attr_info] << {:attr_name => attr_name, :match_cond => match_cond, :component_order => cmp[:component_order]}
            end
          end
        end
        raise Error.new("Unexpected form") if unexpected_form
      end
      ret = Array.new
      # TODO: more efficient is getting this in bulk
      attrs_to_get.each do |cmp_id,info|
        info[:attr_info].each do |attr_info|
          # if component order appears twice then that means disjunction
          next unless attr_val_info = info[:component].get_virtual_attribute(attr_info[:attr_name],[:attribute_value])
          # TODO: stubbed form treating
          match_cond = attr_info[:match_cond]
          raise Error.new("Unexpected form") unless match_cond.size == 3 and match_cond[0] == :eq and match_cond[1] == :attribute_value
          if attr_val_info[:attribute_value] == match_cond[2]
            ret << info[:component].merge(:component_order => attr_info[:component_order])
          end
        end
      end
      ret
    end

    # Folds each row's :after relation into component_deps, avoiding
    # duplicate dependencies. Mutates and returns component_deps.
    def self.update_with_order_info!(component_deps,cmps_with_order_info)
      cmps_with_order_info.each do |info|
        # bug fix: previously referenced an undefined local 'order_info'
        # (NameError when inserting a new id); the component type comes
        # from the row itself
        pntr = component_deps[info[:id]] ||= {:component_type=> info[:component_type], :component_dependencies=>Array.new}
        dep = info[:component_order][:after]
        pntr[:component_dependencies] << dep unless pntr[:component_dependencies].include?(dep)
      end
      component_deps
    end
  end
end
-
-
-
1
module DTK
  # A reference from an assembly/node to a component template.
  class ComponentRef < Model
    # Columns commonly selected when fetching component refs.
    def self.common_cols()
      [:id,:group_id,:display_name,:component_template_id,:has_override_version,:version,:component_type,:template_id_synched]
    end

    # Ref string for a component type, with optional title suffix.
    def self.ref(component_type,title = nil)
      title ? ComponentTitle.ref_with_title(component_type,title) : component_type
    end

    # Ref string derived from a component hash; a title is used when the
    # hash's display name carries one.
    def self.ref_from_component_hash(cmp_hash)
      title = ComponentTitle.title?(cmp_hash)
      ref(cmp_hash[:component_type],title)
    end

    # TODO: changed this to use '::' form but that broke the port links; determine how display name is used
    # before making any changes; this relates to DTK-1663
    def self.display_name(cmp_type,title = nil)
      title ? ComponentTitle.display_name_with_title(cmp_type,title) : cmp_type
    end

    # Human-readable name: component type with '__' rendered as '::',
    # plus "[title]" when this ref carries a title.
    def display_name_print_form(opts={})
      cols_to_get = [:component_type,:display_name,:ref_name]
      update_object!(*cols_to_get)
      component_type = self[:component_type] && self[:component_type].gsub(/__/,"::")
      ret = component_type
      # handle component title
      if title = ComponentTitle.title?(self)
        ret = ComponentTitle.print_form_with_title(ret,title)
      end
      ret
    end

    # Returns the distinct component modules referenced by
    # component_refs, each augmented with its :namespace_name.
    def self.get_referenced_component_modules(project,component_refs)
      ret = Array.new
      return ret if component_refs.empty?
      sp_hash = {
        :cols => [:id,:display_name,:group_id,:namespace_info],
        :filter => [:oneof, :id, component_refs.map{|r|r[:component_template_id]}.uniq]
      }
      aug_cmp_templates = get_objs(project.model_handle(:component),sp_hash)
      ndx_ret = Hash.new
      aug_cmp_templates.each do |r|
        component_module = r[:component_module]
        ndx = component_module[:id]
        ndx_ret[ndx] ||= component_module.merge(:namespace_name => r[:namespace][:display_name])
      end
      ndx_ret.values
    end

    # Print form for a ref given as object or hash; falls back to an
    # "id:<id>" form when no component type is present, and nil when
    # neither field is set.
    def self.print_form(cmp_ref__obj_or_hash)
      if cmp_ref__obj_or_hash[:component_type]
        Component.component_type_print_form(cmp_ref__obj_or_hash[:component_type])
      elsif cmp_ref__obj_or_hash[:id]
        # bug fix: removed a stray unmatched ')' that was appended to the string
        "id:#{cmp_ref__obj_or_hash[:id].to_s}"
      end
    end

  end
end
-
1
module XYZ
  # Placeholder model for component relations; all behavior is inherited
  # from Model (no relation-specific logic yet).
  class ComponentRelation < Model
  end
end
-
1
module DTK
  # Helpers for component names that carry an optional "title" suffix:
  # rendered as "component_type[title]" in display names and as
  # "component_type--title" in refs.
  module ComponentTitle
    # Print form: "name[title]".
    def self.print_form_with_title(component_name,title)
      "#{component_name}[#{title.to_s}]"
    end

    # this is for field display_name
    def self.display_name_with_title(component_type,title)
      "#{component_type}[#{title}]"
    end

    # Like display_name_with_title, but passes component_type through
    # unchanged when no title is given.
    def self.display_name_with_title?(component_type,title=nil)
      title ? display_name_with_title(component_type,title) : component_type
    end

    # Ref form: "component_type--title", with '/' in the title mapped to
    # '__' so the ref stays slash-free.
    def self.ref_with_title(component_type,title)
      sanitized_title = title.gsub(/\//,'__')
      "#{component_type}--#{sanitized_title}"
    end

    # Converts a user-friendly name to display-name form and parses it;
    # see parse_component_display_name for the return shape.
    # (cleanup: removed dead local assignments; the parse result was
    # already the implicit return value)
    def self.parse_component_user_friendly_name(user_friendly_name,opts={})
      cmp_display_name = Component.display_name_from_user_friendly_name(user_friendly_name)
      parse_component_display_name(cmp_display_name,opts)
    end

    # parse_component_display_name
    # if opts has :node_prefix, returns [node_name,component_type,title]
    # else returns [component_type,title]
    # if illegal form, nil will be returned
    # in all cases title could be nil
    def self.parse_component_display_name(cmp_display_name,opts={})
      node_name = component_type = title = nil
      if cmp_display_name =~ ComponentTitleRegex
        cmp_node_part,title = [$1,$2]
      else
        cmp_node_part = cmp_display_name
      end

      ret = nil
      if opts[:node_prefix]
        if cmp_node_part =~ SplitNodeComponentType
          node_name,component_type = [$1,$2]
        else
          component_type = cmp_node_part
        end
        ret = [node_name,component_type,title]
      else
        component_type = cmp_node_part
        ret = [component_type,title]
      end

      # nil (illegal form) unless the component type has a legal shape
      if component_type =~ LegalComponentType
        ret
      end
    end

    LegalComponentType = /^[^\/]+$/ #TODO: make more restricting
    SplitNodeComponentType = /(^[^\/]+)\/([^\/]+$)/

    # Returns the title portion of "name[title]", or nil when absent.
    def self.parse_title?(cmp_display_name)
      if cmp_display_name =~ ComponentTitleRegex
        $2
      end
    end
    ComponentTitleRegex = /(^.+)\[(.+)\]$/

    # component can be a hash or object
    # Returns the title embedded in the component's display name, or nil.
    def self.title?(component)
      return nil unless component # convenience so callers dont have to nil-check the argument
      display_name = component[:display_name] || (component.kind_of?(Component) && component.get_field?(:display_name))
      unless display_name
        raise Error.new("Parameter (component) should have :display_name field")
      end
      component_type,title = parse_component_display_name(display_name)
      title
    end

  end
end
-
1
module XYZ
-
1
# Static definition of the component type hierarchy: each basic type maps
# to a nested hash of its specific subtypes. Included by
# ComponentTypeHierarchy and ComponentType, which derive lookups from it.
module TypeHierarchyDefMixin
  TypeHierarchy = {
    :service => {
      :app_server=>{},
      :web_server=>{},
      :db_server => {
        :postgres_db_server=>{},
        :mysql_db_server=>{},
        :oracle_db_server=>{},
      },
      :monitoring_server=>{},
      :monitoring_agent=>{},
      :msg_bus=>{},
      :memory_cache=>{},
      :load_balancer=>{},
      :firewall=>{},
    },

    :language => {
      :ruby=>{},
      :php=>{},
      :perl=>{},
      :javascript=>{},
      :java=>{},
      :clojure=>{},
    },

    :application => {
      :java_app => {
        :java_spring=>{},
      },
      :ruby_app => {
        :ruby_rails=>{},
        :ruby_ramaze=>{},
        :ruby_sinatra=>{},
      },
      :php_app => {},
    },

    :extension => {},

    :database => {
      :postgres_db=>{},
      :mysql_db=>{},
      :oracle_db=>{},
    },

    :user => {}
  }
  # TODO: stub implementation
  # given a basic type, gives the list of link_def_types
  TypeHierarchyPossLinkDefs = {
    :application => [
      :database
    ]
  }
  # TODO: stub implementation
  # given a link_def_type, returns the basic type that can be the endpoint
  TypeHierarchyPossRemoteComponentTypes = {
    :database => :database
  }
end
-
1
# Lookup/introspection over the TypeHierarchy constant. Also records the
# names of its own subclasses (via the inherited hook) so that
# ComponentType.ret_class can resolve type names to classes.
class ComponentTypeHierarchy
  include TypeHierarchyDefMixin

  # TODO: stub; only uses one level; not hierarchical structure
  # Possible link_def types for the component's basic type (empty array
  # when the component has no basic type or no entry).
  def self.possible_link_defs(component)
    ret = Array.new
    basic_type = component.update_object!(:basic_type)[:basic_type]
    return ret unless basic_type
    TypeHierarchyPossLinkDefs[basic_type.to_sym]||Array.new
  end

  # cmps_parent_idh can be a library or project
  # returns an array (possibly empty) of components
  # TODO: stub; only uses one level; not hierarchical structure
  def self.possible_link_def_remote_components(link_def_type,cmps_parent_idh)
    ret = Array.new
    basic_type = TypeHierarchyPossRemoteComponentTypes[link_def_type.to_sym]
    return ret unless basic_type
    cmp_mh = cmps_parent_idh.create_childMH(:component)
    parent_col = cmp_mh.parent_id_field_name()
    sp_hash = {
      :cols => Component.common_columns(),
      :filter => [:and, [:eq, parent_col, cmps_parent_idh.get_id()],
                  [:eq, :basic_type, basic_type.to_s]]
    }
    Model.get_objs(cmp_mh,sp_hash)
  end

  # Basic type for a specific type (e.g. :mysql_db -> :database).
  def self.basic_type(specific_type)
    ret_basic_type[specific_type.to_sym]
  end

  # True when type is this class's own key or one of its subtypes.
  def self.include?(type)
    type && specific_types.include?(type.to_sym)
  end

  # NOTE(review): 'private' has no effect on singleton methods defined
  # with 'def self.'; everything below remains publicly callable (and
  # add_to_subclass/subclass_names are in fact called from ComponentType).
  private
  # adapted from http://www.ruby-forum.com/topic/163430
  # Registers every named subclass as it is defined.
  def self.inherited(sub)
    return if sub.to_s =~ /^#<Class/ #hack to get rid of anonymous classes
    add_to_subclass(sub)
  end

  # Records the demodulized subclass name (deduplicated).
  def self.add_to_subclass(sub)
    subclass_name = Aux::demodulize(sub.to_s)
    (@subclass_names ||= Array.new).push(subclass_name).uniq!
  end
  # Names of all registered subclasses (class instance variable, not a
  # shared @@ class variable).
  def self.subclass_names()
    @subclass_names
  end

  # Memoized map from every specific type to its basic type.
  def self.ret_basic_type()
    @basic_type ||= TypeHierarchy.inject({}){|h,kv|h.merge(ret_basic_type_aux(kv[0],kv[1]))}
  end

  # Maps every key under hier to basic_type.
  def self.ret_basic_type_aux(basic_type,hier)
    keys_in_hierarchy(hier).inject({}){|h,x| h.merge(x => basic_type)}
  end

  # All keys at every level of a nested hierarchy hash.
  def self.keys_in_hierarchy(hier)
    hier.inject([]){|a,kv|a + [kv[0]] + keys_in_hierarchy(kv[1])}
  end

  # Depth-first search for key; returns the sub-hierarchy under it or nil.
  def self.find_hierarchy_under_key(key,hier=TypeHierarchy)
    return nil if hier.empty?
    return hier[key] if hier[key]
    hier.values.each do |child|
      ret = find_hierarchy_under_key(key,child)
      return ret if ret
    end
    nil
  end

  # All keys strictly below subtype, or nil when subtype is not found.
  def self.keys_under_subtype(subtype)
    subtype_hier = find_hierarchy_under_key(subtype)
    subtype_hier ? keys_in_hierarchy(subtype_hier) : nil
  end

  # Hierarchy key derived from the class name (e.g. DbServer -> :db_server).
  def self.key_associated_with_class()
    Aux.underscore(Aux.demodulize(self.to_s)).to_sym
  end

  # Memoized: this class's key plus all keys below it.
  def self.specific_types()
    return @specific_types if @specific_types
    key = key_associated_with_class()
    @specific_types = [key] + keys_under_subtype(key)
  end
end
-
-
1
# Namespace holding one ComponentTypeHierarchy subclass per key in
# TypeHierarchy; classes not defined explicitly below are generated
# dynamically when this module is loaded.
module ComponentType
  # Resolves a type name to its class, or nil when no subclass with that
  # (camelized) name has been registered.
  def self.ret_class(type)
    klass_name = Aux::camelize(type.to_s)
    return nil unless ComponentTypeHierarchy.subclass_names().include?(klass_name)
    const_get(klass_name)
  end

  # TODO: intent is to be able to add custom classes
  class DbServer < ComponentTypeHierarchy
  end
  class Application < ComponentTypeHierarchy
  end

  # dynamically create all other classes not explicitly defined
  # Collects every key at every level of a nested hash (non-hash -> []).
  def self.all_keys(x)
    return Array.new unless x.kind_of?(Hash)
    x.keys + x.values.map{|el|all_keys(el)}.flatten
  end
  # Load-time generation: for each hierarchy key lacking an explicit
  # subclass above, define an anonymous subclass under this namespace and
  # register its name (const_set gives the anonymous class its name, so
  # the inherited-hook's anonymous-class guard has already skipped it).
  existing_subclass_names = ComponentTypeHierarchy.subclass_names()
  include TypeHierarchyDefMixin
  all_keys(TypeHierarchy).each do |key|
    klass_name = Aux::camelize(key)
    unless existing_subclass_names.include?(klass_name)
      ComponentTypeHierarchy.add_to_subclass(const_set(klass_name,Class.new(ComponentTypeHierarchy)) )
    end
  end
end
-
end
-
# TODO: simplify by changing target arg to be just idh
-
1
module XYZ
-
1
# A collection of Constraint objects combined with a logical operator
# (:and / :or). Subclasses Array, so the constraints themselves are the
# elements.
class Constraints < Array
  def initialize(logical_op=:and,constraints=[])
    super(constraints)
    @logical_op = logical_op
  end

  # Evaluates all constraints against target. When they do not hold,
  # computes the violations and either raises or persists them depending
  # on opts (:raise_error_when_any_violation /
  # :raise_error_when_error_violation). Returns the boolean evaluation
  # result otherwise.
  def evaluate_given_target(target,opts={})
    ret = evaluate_given_target_just_eval(target,opts)
    return ret if ret

    target_parent_obj = target.values.first.get_parent_id_handle().create_object
    violations = ret_violations(target)
    if opts[:raise_error_when_any_violation]
      # NOTE(review): 'Violation::Expression(...)' is method-call syntax
      # (a method named Expression on Violation), not a constructor;
      # confirm this is intended and not a missing '.new'
      all_violations = Violation::Expression(violations["error"],violations["warning"])
      raise ErrorConstraintViolations.new(all_violations.pp_form)
    elsif opts[:raise_error_when_error_violation]
      # NOTE(review): 'pp' calls below look like leftover debug output
      pp [:warnings, violations["warning"].pp_form]
      Violation.save(target_parent_obj,violations["warning"])
      raise ErrorConstraintViolations.new(violations["error"].pp_form) unless violations["error"].empty?
    else
      pp [:errors, violations["error"].pp_form]
      Violation.save(target_parent_obj,violations["error"])
      pp [:warnings, violations["warning"].pp_form]
      Violation.save(target_parent_obj,violations["warning"])
    end
    ret
  end
  private
  # Pure evaluation: true/false under @logical_op, short-circuiting;
  # empty constraint set evaluates to true.
  def evaluate_given_target_just_eval(target,opts={})
    return true if self.empty?
    self.each do |constraint|
      constraint_holds = constraint.evaluate_given_target(target,opts)
      case @logical_op
        when :or
          return true if constraint_holds
        when :and
          return false unless constraint_holds
      end
    end
    case @logical_op
      when :or then false
      when :and then true
    end
  end
  public
  # Partitions the failing constraints into "error" and "warning"
  # Violation::Expression buckets keyed by each constraint's :severity
  # (default "error").
  def ret_violations(target)
    ret = {"error" => Violation::Expression.new(target,@logical_op), "warning" => Violation::Expression.new(target,@logical_op)}

    self.each do |constraint|
      next if constraint.evaluate_given_target(target)
      severity = constraint[:severity] || "error"
      ret[severity] << constraint
    end
    ret
  end

  # Helpers for rendering constraint descriptions.
  module Macro
    private
    class Common
      # Memoized i18n mappings for the component model.
      # NOTE(review): @@ class variable is shared across all subclasses of
      # Common.
      def self.component_i18n()
        @@component_i18n ||= Model.get_i18n_mappings_for_models(:component)
      end
      # Recursively renders symbols as ":sym" strings (hashes/arrays are
      # walked; plain strings pass through).
      def self.string_symbol_form(term)
        if term.kind_of?(Symbol)
          ":#{term}"
        elsif term.kind_of?(String)
          term
        elsif term.kind_of?(Hash)
          term.inject({}){|h,kv|h.merge(string_symbol_form(kv[0]) => string_symbol_form(kv[1]))}
        elsif term.kind_of?(Array)
          term.map{|t|string_symbol_form(t)}
        else
          Log.error("unexpected form for term #{term.inspect}")
        end
      end
    end
    public
    class RequiredComponent < Common
      # Search pattern matching components of the required type, with
      # symbols rendered in ":sym" string form.
      def self.search_pattern(required_component)
        hash = {
          :filter => [:eq, :component_type, required_component]
        }
        string_symbol_form(hash)
      end
      # Human-readable description of the requirement.
      def self.description(required_component,base_component)
        "#{print_form(required_component)} is required for #{print_form(base_component)}"
      end
      private
      # i18n name when available, else title-cased from the display name.
      def self.print_form(cmp_display_name)
        i18n = Model.i18n_string(component_i18n,:component,cmp_display_name)
        i18n || cmp_display_name.split(name_delimiter()).map{|x|x.capitalize()}.join(" ")
      end
    end
  end
end
-
-
1
# A single constraint over a target (node or port). Holds a normalized
# :search_pattern; evaluation succeeds when the pattern matches rows
# (inverted when :negate is set).
class Constraint < HashObject
  # Factory: wraps a dependency row in the matching subclass; raises
  # Error for any other type/field combination.
  def self.create(dependency)
    if dependency[:type] == "attribute" and dependency[:attribute_attribute_id]
      PortConstraint.new(dependency)
    elsif dependency[:type] == "component" and dependency[:component_component_id]
      ComponentConstraint.new(dependency)
    else
      raise Error.new("unexpected dependency type")
    end
  end

  # Evaluates this constraint against target; a constraint without a
  # :search_pattern necessarily fails. Optionally copies gathered info
  # onto opts[:update_object].
  def evaluate_given_target(target,opts={})
    # if no :search_pattern then this is a 'necessary fail'
    return false unless search_pattern
    dataset = create_dataset(target)
    rows = dataset.all

    # opportunistic gathering of info
    update_object_from_info_gathered!(opts[:update_object],rows) if opts[:update_object]

    is_empty = rows.empty?
    self[:negate] ? is_empty : (not is_empty)
  end

  # Canned constraint builders.
  module Macro
    # Error constraint: at most one component of component_type per node.
    def self.only_one_per_node(component_type)
      user_friendly_type = Component.display_name_print_form(component_type)
      dep = {
        :description => "Only one component of type #{user_friendly_type} can be on a node",
        :severity => "error",
        :negate => true,
        :search_pattern => {
          :filter => [:eq, :component_type, component_type],
        }
      }
      ComponentConstraint.new(dep)
    end
    # Error constraint: an extension's base component must be on the node.
    def self.base_for_extension(extension_cmp_info)
      ext_name = extension_cmp_info[:component_type]
      dep = {
        :description => "Base component for extension#{ext_name ? " (#{ext_name})" : ""} not on node",
        :severity => "error",
        :search_pattern => {
          :filter => [:eq, :component_type, extension_cmp_info[:extended_base]]
        }
      }
      ComponentConstraint.new(dep)
    end

    # Error constraint with no search pattern, i.e. it always fails:
    # used when a link has no legal endpoints.
    def self.no_legal_endpoints(external_link_defs)
      eps = external_link_defs.remote_components
      # no search pattern means 'necessarily fail'
      dep = {
        :description => "Link must attach to node with a component of type (#{eps.join(", ")})",
        :severity => "error"
      }
      PortConstraint.new(dep)
    end
  end

  private
  def initialize(dependency)
    super
    reformat_search_pattern!()
  end
  # Normalizes :search_pattern into a SearchPattern filter (nil stays nil).
  def reformat_search_pattern!()
    self[:search_pattern] = search_pattern && SearchPattern.create_just_filter(search_pattern)
    self
  end
  def search_pattern()
    self[:search_pattern]
  end

  # Overwritten in subclasses; the base implementation always raises.
  def update_object_from_info_gathered!(object,rows)
    # bug fix: previously interpolated undefined local 'obj' (NameError);
    # the parameter is 'object'
    raise Error.new("not treating constraint update of object of type #{object.class.to_s}")
  end
end
-
-
1
module ProcessVirtualComponentMixin
  # converts from form that acts as if attributes are directly attached to component
  # Splits the search pattern's conjunctions into conditions on real
  # component columns (kept on the component join) and "virtual" ones
  # (each becoming an inner join against the attribute model). Returns
  # the resulting join array.
  def ret_join_array(join_cond)
    real = Array.new
    virtual = Array.new
    real_cols = real_component_columns()
    search_pattern.break_filter_into_conjunctions().each do |conjunction|
      parsed_comparision = SearchPatternSimple.ret_parsed_comparison(conjunction)
      if real_cols.include?(parsed_comparision[:col])
        real << conjunction
      else
        virtual << parsed_comparision
      end
    end

    cols = [:id,:display_name]
    # make sure the join key column is selected
    cols << join_cond.keys.first unless cols.include?(join_cond.keys.first)
    direct_component = {
      :model_name => :component,
      :join_type => :inner,
      :join_cond => join_cond,
      :cols => cols
    }
    direct_component.merge!(:filter => [:and] + real) unless real.empty?

    if virtual.empty?
      [direct_component]
    else
      # one attribute join per virtual condition
      [direct_component] +
        virtual.map do |v|
          {
            :model_name => :attribute,
            :alias => v[:col],
            :filter => [v[:op],v[:col],v[:constant]],
            :join_type => :inner,
            :join_cond => {:component_component_id => :component__id},
            :cols => [:id,:display_name]
          }
        end
    end
  end

  # Column names physically on the component relation.
  # NOTE(review): @@ class variable in a mixin is shared across every
  # including class; harmless here since DB_REL_DEF is constant, but a
  # class-instance variable would be the safer idiom.
  def real_component_columns()
    @@real_component_columns ||= DB_REL_DEF[:component][:columns].keys
  end
end
-
-
1
# Constraint evaluated against the components on a node; the target is
# identified by a node id handle (directly, or via a component's
# containing node).
class ComponentConstraint < Constraint
  private
  include ProcessVirtualComponentMixin
  # Builds the dataset: the target node joined against the component
  # (and any virtual attribute) conditions from the search pattern.
  def create_dataset(target)
    node_idh =
      if target["target_node_id_handle"]
        target["target_node_id_handle"]
      elsif target["target_component_id_handle"]
        target["target_component_id_handle"].get_containing_node_id()
      else
        raise Error.new("unexpected target")
      end
    join_cond = {:node_node_id => :node__id}
    join_array = ret_join_array(join_cond)
    model_handle = node_idh.createMH(:node)
    base_sp_hash = {
      :model_name => :node,
      :filter => [:and,[:eq,:id, node_idh.get_id()]],
      :cols => [:id]
    }
    base_sp = SearchPatternSimple.new(base_sp_hash)
    SQL::DataSetSearchPattern.create_dataset_from_join_array(model_handle,base_sp,join_array)
  end

  # Copies fields from the first matched row's :component onto object,
  # per the {object_key => row_key} map in self[:info_gathered].
  def update_object_from_info_gathered!(object,rows)
    row = rows.first
    return unless self[:info_gathered] and row and row[:component]
    self[:info_gathered].each{|obj_key,k| object[obj_key] = row[:component][k]}
  end
end
-
-
1
# Constraint evaluated against the component owning the attribute (port)
# at the other end of a link; target carries :target_port_id_handle.
class PortConstraint < Constraint
  private
  include ProcessVirtualComponentMixin
  # Builds the dataset: the target port's attribute row joined against
  # the component (and virtual attribute) conditions.
  def create_dataset(target)
    other_end_idh = target[:target_port_id_handle]
    join_cond = {:id => :attribute__component_component_id}
    join_array = ret_join_array(join_cond)
    model_handle = other_end_idh.createMH(:attribute)
    base_sp_hash = {
      :model_name => :attribute,
      :filter => [:and,[:eq,:id, other_end_idh.get_id()]],
      :cols => [:id,:component_component_id]
    }
    base_sp = SearchPatternSimple.new(base_sp_hash)
    SQL::DataSetSearchPattern.create_dataset_from_join_array(model_handle,base_sp,join_array)
  end
end
-
end
-
-
-
-
1
module XYZ
-
1
# A configured external data source; rows hold connection/collection
# state (e.g. :last_collection_timestamp).
class DataSource < Model
  # set_relation_name(:data_source,:data_source)

  ### virtual column defs
  #######################
  ### object access functions

  # Stamps the data source as having completed a collection pass.
  def self.set_collection_complete(id_handle)
    update_from_hash_assignments(id_handle,{:last_collection_timestamp => Time.now})
  end

  #######################

  # TODO: see what below we want to keep
  # actions
  class << self
    # Creates a data source named ref under the container; raises Error
    # if one with that ref already exists. Returns container_handle_id.
    def create(container_handle_id,ref,hash_content={})
      factory_id_handle = get_factory_id_handle(container_handle_id)
      id_handle = get_child_id_handle_from_qualified_ref(factory_id_handle,ref)
      raise Error.new("data source #{ref} exists already") if exists? id_handle

      hash_with_defaults = fill_in_defaults(ref.to_sym,hash_content)
      create_from_hash(container_handle_id, {:data_source => {ref => hash_with_defaults}})
      container_handle_id
    end
  end
  private
  # helper fns
  class << self
    DS_defaults = Hash.new #TBD: stub
    # Merges per-key defaults into hash_content and cascades defaults
    # into each nested data_source_object entry.
    def fill_in_defaults(ds_name,hash_content)
      hash_with_defaults = Hash.new
      [:source_handle,:data_source_object].each do |k|
        v = hash_content[k] || DS_defaults[k]
        hash_with_defaults[k] = v if v
      end
      if hash_with_defaults[:data_source_object]
        # NOTE(review): this inserts string keys into the hash while
        # iterating it, so the newly added entries are re-visited on some
        # Ruby versions; presumably safe only when keys start as symbols
        # — confirm
        hash_with_defaults[:data_source_object].each do |obj_type,child_hash_content|
          hash_with_defaults[:data_source_object][obj_type.to_s] =
            DataSourceObject.fill_in_defaults(ds_name,obj_type.to_sym,child_hash_content)
        end
      end
      hash_with_defaults[:ds_name] = ds_name.to_s
      hash_with_defaults
    end
  end
end
-
-
1
# One object-type entry under a data source: knows how to discover
# external objects (via its connector/adapter) and sync them into the DB.
class DataSourceEntry < Model
  attr_reader :ds_object_adapter
  # set_relation_name(:data_source,:entry)
  # actions
  # Fetches source objects, normalizes/updates each into the DB (marking
  # it), then deletes DB rows that were not marked.
  def discover_and_update()
    marked = Array.new
    hash_completeness_info = get_objects() do |source_obj|
      normalize_and_update_db(@container_id_handle,source_obj,marked)
    end
    delete_unmarked(@container_id_handle,marked,hash_completeness_info)
  end

  # helper fns
  include DataSourceAdapterInstanceMixin
  include DataSourceConnectorInstanceMixin

  # Requires :obj_type and :ds_name in hash_scalar_values; wires up the
  # container handle, parent data-source object, and the connector and
  # adapter classes.
  def initialize(hash_scalar_values,c,relation_type)
    super(hash_scalar_values,c,relation_type)
    raise Error.new(":obj_type should be in hash_scalar_values") if hash_scalar_values[:obj_type].nil?
    raise Error.new(":ds_name should be in hash_scalar_values") if hash_scalar_values[:ds_name].nil?
    # default is to place in container that the data source root sets in
    # TBD: logic to override if @objects_location set
    default_container_obj = get_parent_object().get_parent_object()
    @container_id_handle = default_container_obj.id_handle
    @parent_ds_object = get_parent_object()
    load_ds_connector_class()
    load_ds_adapter_class()
    @ds_connector_instance = nil #gets set subsequently so sharing can be done across instances
  end
  # Entry's object type as a string.
  def obj_type()
    self[:obj_type].to_s
  end
  # Owning data source name as a string.
  def ds_name()
    self[:ds_name].to_s
  end
  # Source-side object type as a string, or nil when unset.
  def source_obj_type()
    self[:source_obj_type] ? self[:source_obj_type].to_s : nil
  end
  # Whether this data source is the golden store for the object type.
  def ds_is_golden_store()
    self[:ds_is_golden_store]
  end

  class << self
    DS_object_defaults = HashObject.new
    # Merges per-(ds_name,key) defaults into hash_content and stamps the
    # ds_name/obj_type fields (both stringified).
    def fill_in_defaults(ds_name,obj_type,hash_content)
      hash_with_defaults = Hash.new
      [:filter,:update_policy,:polling_policy,:objects_location].each do |key|
        v = hash_content[key] || DS_object_defaults.nested_value([ds_name,key])
        hash_with_defaults[key] = v if v
      end
      hash_with_defaults[:ds_name] = ds_name.to_s
      hash_with_defaults[:obj_type] = obj_type.to_s
      hash_with_defaults
    end
  end
end
-
end
-
1
module DTK
  class Dependency < Model
    # because initially Dependency only refered to simple dependencies; introduced Simple and Links and their parent All
    # TODO: may have what is attached to Model be Dependency::Simple and have Dependency become what is now All

    # forward-declare All so the nested requires below can subclass it
    class All; end
    r8_nested_require('dependency','simple')
    r8_nested_require('dependency','link')
    class All
      def initialize()
        @satisfied_by_component_ids = []
      end

      attr_reader :satisfied_by_component_ids

      # Augments each component in place with its simple and link
      # dependencies; returns components.
      def self.augment_component_instances!(assembly,components,opts=Opts.new)
        return components if components.empty?
        Dependency::Simple.augment_component_instances!(components,opts)
        Dependency::Link.augment_component_instances!(assembly,components,opts)
        components
      end
    end

    # if this has simple filter, meaning test on same node as dependency then return it, normalizing to convert strings into symbols
    # Returns [op_sym, field_sym, value] or nil.
    def simple_filter_triplet?()
      # search pattern keys/values are stored in ":sym" string form
      if filter = (self[:search_pattern]||{})[":filter".to_sym]
        if self[:type] == "component" and filter.size == 3
          logical_rel_string = filter[0]
          field_string = filter[1]
          if SimpleFilterRelationsToS.include?(logical_rel_string) and field_string =~ /^:/
            [logical_rel_string.gsub(/^:/,'').to_sym,field_string.gsub(/^:/,'').to_sym,filter[2]]
          end
        end
      end
    end

    SimpleFilterRelations = [:eq]
    # the same relations rendered in ":eq"-style string form
    SimpleFilterRelationsToS = SimpleFilterRelations.map{|r|":#{r.to_s}"}

    # if its simple component type match returns component type
    def is_simple_filter_component_type?()
      if filter_triplet = simple_filter_triplet?()
        SimpleFilter.create(filter_triplet).component_type?()
      end
    end

    # True/false when this is a simple filter and cmp matches it; nil
    # when there is no simple filter.
    def component_satisfies_dependency?(cmp)
      if filter_triplet = simple_filter_triplet?()
        SimpleFilter.create(filter_triplet).match?(cmp)
      end
    end

    # Wraps a normalized filter triplet; one subclass per relation
    # (currently only Eq).
    class SimpleFilter
      # Dispatches on the triplet's operator (e.g. :eq -> Eq).
      def self.create(triplet)
        const_get(triplet[0].to_s.capitalize()).new(triplet)
      end

      # Base case: not a component-type filter (returns nil).
      def component_type?()
      end

      private
      def initialize(triplet)
        @field = triplet[1]
        @value = triplet[2]
      end

      class Eq < self
        # True when component has @field and its value equals @value.
        def match?(component)
          component.has_key?(@field) and @value == component[@field]
        end

        # The matched component type, when this filter tests :component_type.
        def component_type?()
          @value if @field == :component_type
        end
      end
    end

  end
end
-
-
-
2
module DTK; class Dependency
  # A dependency backed by a link_def row: one component template "links to"
  # another (internally on the same node or externally across nodes).
  class Link < All
    attr_reader :link_def
    # link_def - a LinkDef model row/hash for this dependency
    def initialize(link_def)
      super()
      @link_def = link_def
    end

    # Creates (or augments) a link_def connecting cmp_template to
    # antec_cmp_template, driven by the source/target attribute patterns in opts.
    # Returns a result hash; includes :component_module_updated when the module
    # DSL was touched and :link_def_created when a brand-new link_def was made.
    # Raises Error unless opts carries both :source_attr_pattern and :target_attr_pattern.
    def self.create_dependency?(cmp_template,antec_cmp_template,opts={})
      result = Hash.new
      source_attr_pattern = opts[:source_attr_pattern]
      target_attr_pattern = opts[:target_attr_pattern ]
      unless source_attr_pattern and target_attr_pattern
        raise Error.new("Not implemented: when opts does not include :source_attr_pattern and :target_attr_pattern")
      end
      # same node on both ends => "internal" link; otherwise "external"
      external_or_internal = (target_attr_pattern.node().id() == source_attr_pattern.node().id() ? "internal" : "external")
      aug_link_defs = cmp_template.get_augmented_link_defs()
      if link_def_link = matching_link_def_link?(aug_link_defs,external_or_internal,antec_cmp_template)
        # link already exists; only add the attribute mapping if it is new
        unless link_def_link.matching_attribute_mapping?(target_attr_pattern,source_attr_pattern)
          # aug_link_defs gets updated as side effect
          link_def_link.add_attribute_mapping!(attribute_mapping_serialized_form(source_attr_pattern,target_attr_pattern))
          incrementally_update_component_dsl?(cmp_template,aug_link_defs,opts)
          result.merge!(:component_module_updated => true)
        end
      else
        # no matching link: create link_def + link, then re-fetch so the DSL
        # update sees the freshly created rows
        link_def_create_hash = create_link_def_and_link(external_or_internal,cmp_template,antec_cmp_template,attribute_mapping_serialized_form(source_attr_pattern,target_attr_pattern))
        aug_link_defs = cmp_template.get_augmented_link_defs()
        incrementally_update_component_dsl?(cmp_template,aug_link_defs,opts)
        result.merge!(:component_module_updated => true, :link_def_created => {:hash_form => link_def_create_hash})
      end
      result
    end

    # Human-readable name of what this dependency depends on.
    def depends_on_print_form?()
      # link_type may be label or component_type
      # TODO: assumption that its safe to process label through component_type_print_form
      Component.component_type_print_form(@link_def[:link_type])
    end

    # Attaches Link dependency objects to each component in components
    # (under cmp[:dependencies]); with opts[:ret_statisfied_by] set, also
    # resolves which component ids satisfy each link via the assembly's
    # augmented port links. Returns components (mutated in place).
    # NOTE(review): the opt key ":ret_statisfied_by" is misspelled but used
    # consistently by callers; do not "fix" without a coordinated rename.
    def self.augment_component_instances!(assembly,components,opts=Opts.new)
      return components if components.empty?
      link_defs = LinkDef.get(components.map{|cmp|cmp.id_handle()})
      unless link_defs.empty?
        link_deps = Array.new
        components.each do |cmp|
          cmp_id = cmp[:id]
          matching_link_defs = link_defs.select{|ld|ld[:component_component_id] == cmp_id}
          matching_link_defs.each do |ld|
            dep = new(ld)
            link_deps << dep
            (cmp[:dependencies] ||= Array.new) << dep
          end
        end
        if opts[:ret_statisfied_by] and not link_deps.empty?
          aug_port_links = assembly.get_augmented_port_links()
          link_deps.each{|link_dep|link_dep.set_satisfied_by_component_ids?(aug_port_links)}
        end
      end
      components
    end

    # Records the component ids whose output ports are wired (via port links)
    # to this link_def's input ports.
    def set_satisfied_by_component_ids?(aug_port_links)
      link_def_id = @link_def[:id]
      matches = aug_port_links.select{|aug_port|aug_port[:input_port][:link_def_id] == link_def_id}
      @satisfied_by_component_ids = matches.map{|match|match[:output_port][:component_id]}
    end

    def satisfied_by_component_ids
      @satisfied_by_component_ids
    end

    private
    # NOTE(review): `private` above does not affect the following `def self.`
    # singleton methods; they remain publicly callable on the class.

    # {source serialized form => target serialized form} pair used inside
    # a link_def's attribute_mappings list.
    def self.attribute_mapping_serialized_form(source_attr_pattern,target_attr_pattern)
      {source_attr_pattern.am_serialized_form() => target_attr_pattern.am_serialized_form()}
    end

    # Finds the (at most one) link_def link matching the antecedent component
    # type and the internal/external kind; raises if more than one matches.
    def self.matching_link_def_link?(aug_link_defs,external_or_internal,antec_cmp_template)
      antec_cmp_type = antec_cmp_template.get_field?(:component_type)
      matches = Array.new
      aug_link_defs.each do |link_def|
        (link_def[:link_def_links]||[]).each do |link|
          if link[:remote_component_type] == antec_cmp_type and link [:type] == external_or_internal
            matches << link
          end
        end
      end
      if matches.size > 1
        raise Error.new("Not implemented when matching_augmented_link_def? finds more than 1 match")
      end
      matches.first
    end

    # Builds the serialized link_def form, parses it, writes it into the model
    # under cmp_template, and returns the parsed create-hash.
    def self.create_link_def_and_link(external_or_internal,cmp_template,antec_cmp_template,am_serialized_form)
      antec_cmp_type = antec_cmp_template[:component_type]
      serialized_link_def =
        {"type" => antec_cmp_template.display_name_print_form(),
         "required"=>true,
         "possible_links"=>
          [{antec_cmp_type=>
             {"type"=>external_or_internal.to_s,
              "attribute_mappings"=> [am_serialized_form]
             }
           }]
        }
      link_def_create_hash = LinkDef.parse_from_create_dependency(serialized_link_def)
      Model.input_hash_content_into_model(cmp_template.id_handle(),:link_def => link_def_create_hash)
      link_def_create_hash
    end

    # When opts[:update_dsl] is given, pushes the (already mutated)
    # aug_link_defs into the module branch's component DSL.
    # Raises Error if :update_dsl is present without a :module_branch.
    def self.incrementally_update_component_dsl?(cmp_template,aug_link_defs,opts={})
      if update_dsl = opts[:update_dsl]
        unless module_branch = update_dsl[:module_branch]
          raise Error.new("If update_dsl is specified then module_branch must be provided")
        end
        module_branch.incrementally_update_component_dsl(aug_link_defs,:component_template=>cmp_template)
      end
    end
  end
end; end
-
-
2
module DTK; class Dependency
  # A "simple" dependency: a dependency table row whose search pattern is a
  # plain component_type filter, tied to the node the component runs on.
  class Simple < All
    # dependency_obj - the dependency model row; node - the owning node row
    def initialize(dependency_obj,node)
      super()
      @dependency_obj = dependency_obj
      @node = node
    end

    # Idempotent create: inserts the dependency row only when an equivalent one
    # does not already exist. Always returns an (empty) hash.
    def self.create_dependency?(cmp_template,antec_cmp_template,opts={})
      ret = Hash.new
      unless dependency_exists?(cmp_template,antec_cmp_template)
        create_dependency(cmp_template,antec_cmp_template,opts)
      end
      ret
    end

    # Unconditionally inserts a dependency row saying antec_cmp_template's
    # component_type is required by cmp_template, with a warning severity.
    def self.create_dependency(cmp_template,antec_cmp_template,opts={})
      antec_cmp_template.update_object!(:display_name,:component_type)
      # stringified keys/operators are the persisted search-pattern format
      search_pattern = {
        ':filter' => [':eq', ':component_type',antec_cmp_template[:component_type]]
      }
      create_row = {
        :ref => antec_cmp_template[:component_type],
        :component_component_id => cmp_template.id(),
        :description => "#{antec_cmp_template.component_type_print_form()} is required for #{cmp_template.component_type_print_form()}",
        :search_pattern => search_pattern,
        :type => 'component',
        :severity => 'warning'
      }
      dep_mh = cmp_template.model_handle().create_childMH(:dependency)
      Model.create_from_row(dep_mh,create_row,:convert=>true,:returning_sql_cols=>create_or_exists_cols())
    end
    class << self
      private
      # Row lookup keyed on (component id, antecedent component_type ref);
      # truthy result means the dependency already exists.
      def dependency_exists?(cmp_template,antec_cmp_template)
        sp_hash = {
          :cols => create_or_exists_cols(),
          :filter => [:and,[:eq,:component_component_id,cmp_template.id()],
                      [:eq,:ref,antec_cmp_template.get_field?(:component_type)]]
        }
        Model.get_obj(cmp_template.model_handle(:dependency),sp_hash)
      end
      # Columns shared by the existence check and the create's RETURNING clause.
      def create_or_exists_cols()
        [:id,:group_id,:component_component_id,:search_pattern,:type,:description,:severity]
      end
    end

    # Human-readable form of what this dependency requires, when its filter
    # reduces to a single component_type; nil otherwise.
    def depends_on_print_form?()
      if cmp_type = @dependency_obj.is_simple_filter_component_type?()
        Component.component_type_print_form(cmp_type)
      end
    end

    # Attaches Simple dependency objects to each component (under
    # cmp[:dependencies]); with opts[:ret_statisfied_by] (sic) set, also marks
    # which components satisfy each dependency. Returns components (mutated).
    def self.augment_component_instances!(components,opts=Opts.new)
      return components if components.empty?
      sp_hash = {
        :cols => [:id,:group_id,:component_component_id,:search_pattern,:type,:description,:severity],
        :filter => [:oneof,:component_component_id,components.map{|cmp|cmp.id()}]
      }
      dep_mh = components.first.model_handle(:dependency)

      dep_objs = Model.get_objs(dep_mh,sp_hash)
      return components if dep_objs.empty?

      simple_deps = Array.new
      # index components by id for O(1) lookup per dependency row
      ndx_components = components.inject(Hash.new){|h,cmp|h.merge(cmp[:id] => cmp)}
      dep_objs.each do |dep_obj|
        cmp = ndx_components[dep_obj[:component_component_id]]
        dep = new(dep_obj,cmp[:node])
        simple_deps << dep
        (cmp[:dependencies] ||= Array.new) << dep
      end
      if opts[:ret_statisfied_by] and not simple_deps.empty?()
        satisify_cmps = get_components_that_satisify_deps(simple_deps)

        unless satisify_cmps.empty?
          simple_deps.each{|simple_dep|simple_dep.set_satisfied_by_component_ids?(satisify_cmps)}
        end
      end
      components
    end

    # Appends the first satisfying component's id (same node + filter match).
    # NOTE(review): assumes @satisfied_by_component_ids was initialized to an
    # array by All#initialize (called via super()); if not, `<<` raises on nil
    # — confirm against the superclass definition.
    def set_satisfied_by_component_ids?(satisify_cmps)
      match_cmp = satisify_cmps.find do |cmp|
        (cmp[:node_node_id] == @node[:id]) and @dependency_obj.component_satisfies_dependency?(cmp)
      end
      @satisfied_by_component_ids << match_cmp.id() if match_cmp
    end

    attr_reader :dependency_obj, :node
    private
    # NOTE(review): `private` above does not affect the following `def self.`
    # singleton method; it remains publicly callable on the class.

    # One query for all candidate satisfying components: an OR over per-dep
    # (node id AND simple-filter) conjunctions. Non-simple-filter deps are
    # logged and skipped. Returns [] when no dep yields a usable filter.
    def self.get_components_that_satisify_deps(dep_list)
      ret = Array.new
      query_disjuncts = dep_list.map do |simple_dep|
        dep_obj = simple_dep.dependency_obj
        if filter = dep_obj.simple_filter_triplet?()
          [:and,[:eq,:node_node_id,simple_dep.node.id()],filter]
        else
          Log.error("Ignoring a simple dependency that is not a simple filter (#{simple_dep.dependency_obj})")
          nil
        end
      end.compact
      if query_disjuncts.empty?
        return ret
      end
      cmp_mh = dep_list.first.node.model_handle(:component)
      filter = (query_disjuncts.size == 1 ? query_disjuncts.first : [:or] + query_disjuncts)
      sp_hash = {
        :cols => [:id,:group_id,:display_name,:component_type,:node_node_id],
        :filter => filter
      }
      Model.get_objs(cmp_mh,sp_hash)
    end

  end
end; end
-
1
module DTK
  # Developer-facing helpers for pushing the agent onto nodes via the
  # command-and-control channel.
  module DeveloperMixin
    # Entry point: kicks off an inject-agent action against nodes, streaming
    # results back through action_results_queue.
    def self.initiate_inject_agent(action_results_queue, nodes, params)
      Action::InjectAgent.initiate(nodes,action_results_queue, params)
    end

    module Action
      class InjectAgent < ActionResultsQueue::Result
        # Registers expected result indexes (one per node), wires a message
        # callback that pushes per-node response data onto the queue, and fires
        # the :inject_agent request. Pushes an :error entry and returns early
        # when no nodes matched params[:node_pattern].
        def self.initiate(nodes, action_results_queue, params)
          # if nodes empty return error message, case where more nodes are matches should not happen
          if nodes.empty?
            action_results_queue.push(:error, "No nodes have been matched to node identifier: #{params[:node_pattern]}")
            return
          end

          indexes = nodes.map{|r|r[:id]}
          action_results_queue.set_indexes!(indexes)
          # map pbuilderid -> {:id,:display_name} so responses (keyed by
          # pbuilderid) can be routed back to the right node's queue slot
          ndx_pbuilderid_to_node_info = nodes.inject(Hash.new) do |h,n|
            h.merge(n.pbuilderid => {:id => n[:id], :display_name => n[:display_name]})
          end

          callbacks = {
            :on_msg_received => proc do |msg|
              response = CommandAndControl.parse_response__execute_action(nodes,msg)

              # only :ok responses carrying a pbuilderid are recorded;
              # everything else is silently dropped here
              if response and response[:pbuilderid] and response[:status] == :ok
                node_info = ndx_pbuilderid_to_node_info[response[:pbuilderid]]
                action_results_queue.push(node_info[:id],response[:data])
              end
            end
          }

          CommandAndControl.request__execute_action(:dev_manager,:inject_agent,nodes,callbacks,params)
        end
      end
    end
  end
end
-
1
module DTK
  class DNS
    # Immutable value object wrapping a single assigned DNS address string.
    class Assignment
      attr_reader :address
      def initialize(address)
        @address = address
      end
    end
    # Loads the R8 backend implementation (defines DTK::DNS::R8).
    r8_nested_require('dns','r8')
  end
end
-
1
module DTK
  class DNS
    # R8 DNS backend: derives a DNS name for a node from server config
    # (dns.r8.*) plus the node's dns-enabled attributes.
    class R8 < self
      def initialize(node)
        @node = node
      end
      # Convenience wrapper around the instance method.
      def self.generate_node_assignment?(node)
        new(node).generate_node_assignment?()
      end

      # Returns an Assignment for the node, or nil when DNS is not enabled on
      # the node (or on its assembly). Raises Error when the required server
      # config variables dns.r8.domain / dns.r8.tenant_name are unset.
      def generate_node_assignment?()
        unless aug_node = aug_node_when_dns_enabled?()
          return nil
        end

        unless domain = ::R8::Config[:dns][:r8][:domain]
          raise Error.new("Server config variable (dns.r8.domain) has not been set")
        end

        unless tenant = ::R8::Config[:dns][:r8][:tenant_name]
          raise Error.new("Server config variable (dns.r8.tenant_name) has not been set")
        end

        dns_info = {
          :assembly => aug_node[:assembly][:display_name],
          :node => aug_node[:display_name],
          :user => CurrentSession.get_username(),
          :tenant => tenant,
          :domain => domain
        }
        Assignment.new(dns_address(dns_info))
      end

      private
      # Expands the configured format string by substituting each ${part}
      # placeholder with the corresponding value from info.
      def dns_address(info)
        # TODO: should validate ::R8::Config[:dns][:r8][:format]
        format = ::R8::Config[:dns][:r8][:format] || DefaultFormat
        ret = format.dup
        [:node,:assembly,:user,:tenant,:domain].each do |part|
          ret.gsub!(Regexp.new("\\${#{part}}"),info[part])
        end
        ret
      end
      DefaultFormat = "${node}.${assembly}.${user}.${tenant}.${domain}"

      # Returns the augmented node row when its dns-enabled attribute holds a
      # true-ish value ("t..."/"T..." string or TrueClass); nil otherwise.
      def aug_node_when_dns_enabled?()
        if aug_node = get_aug_node_when_dns_info?()
          # check it has a true value; to be robust looking for a string or a Boolean
          if val = (aug_node[:dns_enabled_attribute]||{})[:attribute_value]
            if val.kind_of?(String)
              aug_node if (val =~ /^(t|T)/)
            else
              aug_node if val.kind_of?(TrueClass)
            end
          end
        end
      end

      # Looks up dns-enabled info first at node scope, then falls back to
      # assembly scope; returns the highest-ranked augmented node row or nil.
      def get_aug_node_when_dns_info?()
        sp_hash = {
          :cols => [:dns_enabled_on_node,:id,:group_id,:display_name]
        }
        # checking for multiple rows to handle case where multiple dns attributes given
        aug_nodes = @node.get_objs(sp_hash)

        if aug_nodes.empty?
          # This will be empty only if no assembly is tied to the node,
          # which is expected when the node is a target ref.
          # TODO: dont think dns enabledment works with node groups
          @node.update_obj!(:display_name,:type)
          unless @node[:type] == 'target_ref'
            Log.error_pp(["unexpected that that following node not tied to assembly",@node])
          end
        end

        if ret = select_aug_node?(aug_nodes)
          return ret
        end

        sp_hash = {
          :cols => [:dns_enabled_on_assembly,:id,:group_id,:display_name]
        }

        aug_nodes = @node.get_objs(sp_hash)
        select_aug_node?(aug_nodes)
      end

      # Picks the row whose dns_enabled_attribute has the best (lowest) rank.
      # FIX(review): was `DNS.attr_rank(...)`, but attr_rank is defined below
      # as a private *instance* method of this class and no class-level
      # DNS.attr_rank is visible; calling it directly on self is the working
      # form. Confirm no self.attr_rank exists elsewhere before relying on this.
      def select_aug_node?(aug_nodes)
        aug_nodes.reject{|n|n[:dns_enabled_attribute].nil?}.sort do |n1,n2|
          attr_rank(n2[:dns_enabled_attribute]) <=> attr_rank(n1[:dns_enabled_attribute])
        end.first
      end

      # Rank of a dns attribute by its display_name's position in
      # AttributeKeys; unknown/missing names get LowestRank.
      def attr_rank(attr)
        ret = LowestRank
        if attr_name = (attr||{})[:display_name]
          if rank = RankPos[attr_name]
            ret = rank
          end
        end
        ret
      end

      AttributeKeys = Node::DNS::AttributeKeys
      # Assumes that AttributeKeys has been defined already
      RankPos = AttributeKeys.inject(Hash.new) {|h,ak|
        h.merge(ak => AttributeKeys.index(ak))
      }
      LowestRank = AttributeKeys.size
    end
  end
end
-
-
-
1
module DTK
-
1
  module FactoryObject
    # Model columns shared by factory objects: all common relation columns
    # except bookkeeping ones (local id, connection, timestamps).
    # NOTE(review): relies on COMMON_REL_COLUMNS being defined before this
    # file is loaded.
    CommonCols = COMMON_REL_COLUMNS.keys - [:local_id,:c,:created_at,:updated_at]
  end
-
1
module FactoryObjectCommon
-
1
def assembly_template_node_ref(assembly_ref,node_ref)
-
"#{assembly_ref}--#{node_ref}"
-
end
-
end
-
-
1
module FactoryObjectMixin
-
1
include FactoryObjectCommon
-
1
def qualified_ref(obj_hash)
-
"#{obj_hash[:ref]}#{obj_hash[:ref_num] ? "-#{obj_hash[:ref_num].to_s}" : ""}"
-
end
-
-
1
def id_handle_if_object_exists?()
-
ret = id_handle()
-
ret if ret.get_id()
-
end
-
end
-
1
  # Class-side factory helpers, extended onto model classes.
  module FactoryObjectClassMixin
    include FactoryObjectCommon

    # Instantiates a model object from hash_values under model_handle.
    # Uses a concrete IDH when an :id is supplied, a stub IDH otherwise.
    def create(model_handle,hash_values)
      idh = (hash_values[:id] ? model_handle.createIDH(:id => hash_values[:id]) : model_handle.create_stubIDH())
      new(hash_values,model_handle[:c],model_name(),idh)
    end
    # Re-wraps an existing model object as an instance of this (sub)class,
    # preserving its connection, and id handle.
    def subclass_model(model_object)
      new(model_object,model_object.model_handle[:c],model_name(),model_object.id_handle())
    end
  end
-
end
-
1
module DTK
  # A file stored in a module implementation's repo, optionally cached in the
  # DB (controlled by server config file_asset.cache_content).
  class FileAsset < Model
    # model apis

    # Returns the file's content: the DB-cached copy when caching is on and a
    # copy exists, otherwise fetched from the repo.
    def get_content()
      # if content stored in db then return that
      if cache_content?()
        return self[:content] if self[:content]
      end
      update_object!(:path,:implementation_info)
      content = RepoManager.get_file_content(self,{:implementation => self[:implementation]})
      if cache_content?()
        # TODO: determine whether makes sense to store newly gotten content in db or just do this if any changes
      end
      content
    end

    # Writes content to the DB cache (if enabled) and the repo, marks the
    # implementation updated, re-parses the module DSL when this is the meta
    # file, and records pending state changes.
    def update_content(content)
      if cache_content?()
        update(:content => content)
      end
      update_object!(:path,:implementation_info)

      # TODO: trap parse errors and then do consitemncy check with meta
      config_agent_type = config_agent_type()
      file_path = self[:path]
      # file_config_type, r8_parse = ConfigAgent.parse_given_file_content(config_agent_type,file_path,content)

      impl_obj = self[:implementation]
      RepoManager.update_file_content(self,content,{:implementation => impl_obj})
      impl_obj.set_to_indicate_updated()

      # special processing if this the meta file
      if ModuleDSL.isa_dsl_filename?(self[:path])
        target_impl = self[:implementation]
        component_dsl = ModuleDSL.create_from_file_obj_hash(target_impl,self[:path],content)
        component_dsl.update_model()
      end
      impl_obj.create_pending_changes_and_clear_dynamic_attrs(self)
    end

    # returns sha of remote head
    def self.add_and_push_to_repo(impl_obj,type,path,content,opts={})
      add(impl_obj,type,path,content,opts)
      sha_remote_head = RepoManager.push_implementation(:implementation => impl_obj)
      sha_remote_head
    end
    # Creates the file_asset row, writes the file into the repo, and (unless
    # opts[:is_metafile]) records pending changes for affected components.
    def self.add(impl_obj,type,path,content,opts={})
      hash = ret_create_hash(impl_obj,type,path,content)
      file_asset_mh = impl_obj.model_handle.create_childMH(:file_asset)
      new_file_asset_idh = create_from_row(file_asset_mh,hash)
      new_file_asset_obj = new_file_asset_idh.create_object().merge(hash)
      RepoManager.add_file(new_file_asset_obj,content,{:implementation => impl_obj})
      unless opts[:is_metafile]
        impl_obj.create_pending_changes_and_clear_dynamic_attrs(new_file_asset_obj)
      end
    end

    # Row hash for a new file asset; file_name is path's basename. Content is
    # only persisted when DB caching is enabled.
    def self.ret_create_hash(impl_obj,type,path,content=nil)
      file_name = (path =~ Regexp.new("/([^/]+$)")) ? $1 : path
      {
        :type => type,
        :ref => file_asset_ref(path),
        :file_name => file_name,
        :display_name => file_name,
        :path => path,
        :content => cache_content?() ? content : nil,
        :implementation_implementation_id => impl_obj.id()
      }
    end

    # Converts a flat list of file assets into a nested directory/file tree.
    # (Spelling "hierrachical" is part of the public API; callers use it.)
    def self.ret_hierrachical_file_struct(flat_file_assets)
      ret = Array.new
      flat_file_assets.each{|f| set_hierrachical_file_struct!(ret,f)}
      ret
    end

    # Recursively inserts file_asset into ret, creating intermediate
    # "directory_asset" hash nodes for each path segment.
    def self.set_hierrachical_file_struct!(ret,file_asset,path=nil)
      path ||= file_asset[:path].split("/")
      if path.size == 1
        ret << file_asset.merge(:model_name => "file_asset")
      else
        dir = ret.find{|x|x[:display_name] == path[0] and x[:model_name] == "directory_asset"}
        unless dir
          dir = {
            :model_name => "directory_asset",
            :display_name => path[0]
          }
          ret << dir
        end
        children = dir[:children] ||= Array.new
        set_hierrachical_file_struct!(children,file_asset,path[1..path.size-1])
      end
    end

    protected
    # Maps the stored file type to the config-agent symbol; raises Error for
    # unknown types.
    def config_agent_type()
      update_object!(:type)
      case self[:type]
      when "puppet_file" then :puppet
      when "chef_file" then :chef
      else raise Error.new("Unexpected type (#{self[:type]})")
      end
    end

    private
    # NOTE(review): `private` above does not affect the `def self.` methods
    # below; only the instance method cache_content? becomes private.

    # DB ref for a file: path with slashes replaced by underscores.
    def self.file_asset_ref(path)
      path.gsub(Regexp.new("/"),"_")
    end

    def cache_content?()
      self.class.cache_content?()
    end
    # Whether file content should also be cached in the DB (server config).
    def self.cache_content?()
      R8::Config[:file_asset][:cache_content]
    end

  end
end
-
-
1
r8_require('branch_names')
-
1
module DTK
  # An Implementation ties a module (repo + branch + version) to the model:
  # it owns file_asset rows mirroring the repo's files and links to the
  # component templates generated from the module.
  class Implementation < Model
    include BranchNamesMixin
    extend BranchNamesClassMixin

    def self.common_columns()
      [:id,:group_id,:display_name,:type,:repo,:module_name,:module_namespace,:parse_state,:branch,:version,:updated,:repo_id,:assembly_id]
    end

    # Syncs file_asset rows with a repo diff: deletes rows for removed paths
    # and creates rows for added paths not already present.
    # NOTE(review): mutates paths_to_add in place via reject! — assumes
    # diff_summary.paths_to_add returns a throwaway array; confirm.
    def modify_file_assets(diff_summary)
      paths_to_delete = diff_summary.paths_to_delete
      paths_to_add = diff_summary.paths_to_add

      # find relevant existing files
      sp_hash = {
        :cols => [:id,:display_name,:path],
        :filter => [:and,[:eq,:implementation_implementation_id,id()], [:oneof,:path,paths_to_delete+paths_to_add]]
      }
      file_assets = Model.get_objs(model_handle(:file_asset),sp_hash)
      # delete relevant files
      files_to_delete = file_assets.select{|r|paths_to_delete.include?(r[:path])}
      unless files_to_delete.empty?
        Model.delete_instances(files_to_delete.map{|r|r.id_handle()})
      end

      # add files not already added
      existing_paths = file_assets.map{|r|r[:path]}
      paths_to_add.reject!{|path|existing_paths.include?(path)}
      unless paths_to_add.empty?
        type = "puppet_file" #TODO: hard coded
        create_rows = paths_to_add.map{|path|FileAsset.ret_create_hash(self,type,path)}
        Model.create_from_rows(child_model_handle(:file_asset),create_rows)
      end
    end

    # Idempotent create of an implementation row for a module under project,
    # keyed by (namespace, module_name, branch). Returns the merged object.
    def self.create?(project,local_params,repo,config_agent_type)
      # was local = local_params.create_local(project) which is fine for import from puppet-forge
      # for import-git we use local object, so this is a temp workaround
      local = local_params.is_a?(ModuleBranch::Location::Server::Local) ? local_params : local_params.create_local(project)
      project = local.project
      version = local.version
      module_name = local.module_name
      module_namespace = local.module_namespace_name
      branch = local.branch_name

      match_assigns = {
        :module_name => module_name,
        :branch => branch,
        :module_namespace => module_namespace
      }
      impl_hash = {
        :display_name => version ? "#{module_name}(#{version})" : module_name,
        :type => ImplementationType[config_agent_type],
        :repo => repo.get_field?(:repo_name),
        :repo_id => repo.id,
        :project_project_id => project.id,
        :version => version_field(version)
      }
      impl_ref = ref(module_namespace,module_name,branch)
      impl_mh = project.id_handle().create_childMH(:implementation)
      create_from_row?(impl_mh,impl_ref,match_assigns,impl_hash).create_object().merge(impl_hash)
    end

    # Canonical implementation ref.
    def self.ref(namespace,module_name,branch)
      "#{namespace}-#{module_name}-#{branch}"
    end
    private_class_method :ref

    # Deletes everything associated with module_name: its component rows,
    # its repos (both on disk via RepoManager and in the model), and the
    # implementation rows themselves. No-op when no implementations match.
    def self.delete_repos_and_implementations(model_handle,module_name)
      sp_hash = {
        :cols => [:id,:module_name,:repo_id],
        :filter => [:eq, :module_name, module_name]
      }
      impls = get_objs(model_handle,sp_hash)
      return if impls.empty?

      sp_hash = {
        :cols => [:id,:repo_name,:local_dir],
        :filter => [:oneof,:id,impls.map{|r|r[:repo_id]}.uniq]
      }
      repos = get_objs(model_handle.createMH(:repo),sp_hash)

      sp_hash = {
        :cols => [:id,:display_name],
        :filter => [:oneof,:implementation_id,impls.map{|r|r[:id]}.uniq]
      }
      cmps = get_objs(model_handle.createMH(:component),sp_hash)

      repos.each{|repo|RepoManager.delete_repo(repo)}

      Model.delete_instances(cmps.map{|cmp|cmp.id_handle()})
      Model.delete_instances(repos.map{|repo|repo.id_handle()})
      Model.delete_instances(impls.map{|impl|impl.id_handle()})
    end

    # Adds one file (typed per this implementation's type) and pushes;
    # returns the remote head sha (see FileAsset.add_and_push_to_repo).
    def add_file_and_push_to_repo(file_path,content,opts={})
      update_object!(:type,:repo,:branch)
      file_type = ImplTypeToFileType[self[:type]]
      FileAsset.add_and_push_to_repo(self,file_type,file_path,content,opts)
    end

    # Creates file_asset rows for every file currently in the repo checkout.
    def create_file_assets_from_dir_els()
      update_object!(:type,:repo,:branch)

      file_type = ImplTypeToFileType[self[:type]]
      file_asset_rows = all_file_paths().map do |file_path|
        content = nil #TODO to clear model cache of content
        FileAsset.ret_create_hash(self,file_type,file_path,content)
      end
      return if file_asset_rows.empty?()

      # TODO: need to make create? from rows
      file_asset_mh = model_handle().create_childMH(:file_asset)
      Model.modify_children_from_rows(file_asset_mh,id_handle,file_asset_rows)
    end

    # All file (non-directory) paths in the repo, recursively.
    def all_file_paths()
      RepoManager.ls_r('*',{:file_only=>true},self)
    end

    # git add everything in the checkout and push.
    def add_contained_files_and_push_to_repo()
      context = repo_manager_context()
      RepoManager.add_all_files(context)
      RepoManager.push_implementation(context)
    end

    # Moves top-level repo content into a provider subdirectory, leaving the
    # DSL files (see DSLFilenameRegexp) and excluded folders in place.
    def move_to_provider_subdir(source, destination)
      context = repo_manager_context()

      files = (RepoManager.ls_r(1, {:file_only=>true} ,self)||[])
      files.reject!{|f| f=~DSLFilenameRegexp[1] || f=~DSLFilenameRegexp[2] || f=~DSLFilenameRegexp[3]}

      folders = (RepoManager.ls_r(1, {:directory_only=>true} ,self)||[]) - ExcludeFolders
      RepoManager.move_content(source, destination, files, folders, context)
    end
    # Filename patterns of module DSL files that must stay at repo top level.
    DSLFilenameRegexp = {
      1 => /^r8meta\.[a-z]+\.([a-z]+$)/,
      2 => /^dtk\.model\.([a-z_]+$)/,
      3 => /^module_refs\.([a-z]+$)/,
    }
    ExcludeFolders = ["puppet"]

    # RepoManager context hash identifying this implementation's repo+branch.
    def repo_manager_context()
      update_object!(:repo,:branch)
      {
        :implementation => {
          :repo => self[:repo],
          :branch => self[:branch]
        }
      }
    end
    private :repo_manager_context

    def get_module_branch()
      get_obj(:cols => [:repo_id,:branch,:module_branch])[:module_branch]
    end

    # Nested directory/file tree of this implementation's file assets.
    def get_asset_files()
      flat_file_assets = get_objs_col({:cols => [:file_assets]},:file_asset).reject{|k,v|k == :implementation_implementation_id}
      FileAsset.ret_hierrachical_file_struct(flat_file_assets)
    end

    # indexed by implementation_id
    def self.get_indexed_asset_files(id_handles)
      flat_file_assets = get_objs_in_set(id_handles,{:cols => [:id,:file_assets]})
      ret = Hash.new
      flat_file_assets.each do |r|
        pointer = ret[r[:id]] ||= Array.new
        file_asset = r[:file_asset].reject{|k,v|k == :implementation_implementation_id}
        FileAsset.set_hierrachical_file_struct!(pointer,file_asset)
      end
      ret
    end

    # Adds a single asset file typed via FileAssetType.
    # NOTE(review): FileAssetType only maps :chef_cookbook; for other types
    # file_asset_type will be nil — confirm intended.
    def add_asset_file(path,content=nil)
      update_object!(:type,:repo,:branch)
      file_asset_type = FileAssetType[self[:type].to_sym]
      FileAsset.add(self,file_asset_type,path,content)
    end
    FileAssetType = {
      :chef_cookbook => "chef_file"
    }

    # Hook: default :updated to false on clone/override.
    def add_model_specific_override_attrs!(override_attrs,target_obj)
      override_attrs[:updated] ||= false
    end

    # Marks this implementation, and the component templates pointing at it,
    # as updated.
    def set_to_indicate_updated()
      # TODO: short cut and avoid setting updated on project templates if impl set to updated already update({:updated => true},{:update_only_if_change => true})
      update(:updated => true)
      # set updated for the project templates that point to this implemntation
      cmp_mh = model_handle(:component)
      filter = [:and, [:eq, :implementation_id, id()], [:eq, :type, "template"]]
      Model.update_rows_meeting_filter(cmp_mh,{:updated => true},filter)
    end

    # For every non-node-group component instance of this implementation:
    # clears dynamic attributes (and dependents) and records a pending
    # "update_implementation" state change under the component's datacenter.
    def create_pending_changes_and_clear_dynamic_attrs(file_asset)
      cmp_rows = get_objs({:cols => [:component_summary_info]})
      # remove any node groups
      cmp_rows.reject!{|r|r[:node].is_node_group?}

      Component.clear_dynamic_attributes_and_their_dependents(cmp_rows.map{|r|r[:component].id_handle()})

      # TODO: make more efficient by using StateChange.create_pending_change_items
      cmp_rows.each do |r|
        cmp_idh = r[:component].id_handle()
        parent_idh = cmp_idh.createIDH(:model_name => :datacenter, :id => r[:node][:datacenter_datacenter_id])
        StateChange.create_pending_change_item(:new_item => cmp_idh, :parent => parent_idh, :type => "update_implementation")
      end
    end

    # config agent type -> implementation type column value
    ImplementationType = {
      :puppet => "puppet_module",
      :chef => "chef_cookbook"
    }
    # implementation type -> file_asset type column value
    ImplTypeToFileType = {
      "puppet_module" => "puppet_file",
      "chef_cookbook" => "chef_file"
    }

    # ####### TODO below related to UI and may deprecate

    # TODO: unify with project#get_module_tree()
    # Implementation row plus its component templates (i18n-labelled) and,
    # optionally, its file-asset tree.
    def get_module_tree(opts={})
      sp_hash = {:cols => [:id,:display_name,:type,:project_project_id,:component_template]}
      rows_with_cmps = get_objs(sp_hash)

      i18n = get_i18n_mappings_for_models(:component)
      cmps = rows_with_cmps.map do |r|
        cmp = r[:component].materialize!(Component.common_columns())
        # TODO: see if cleaner way to put in i18n names
        cmp[:name] = i18n_string(i18n,:component, cmp[:name])
        cmp
      end
      # all rows common on all columns expect for :component
      ret_row = rows_with_cmps.first.reject{|k,v|k == :component}
      ret_row.merge!(:components => cmps)
      return [ret_row] unless opts[:include_file_assets]

      indexed_asset_files = Implementation.get_indexed_asset_files([id_handle])
      ret_row.merge!(:file_assets => indexed_asset_files.values.first)
      [ret_row]
    end


    # Lighter-weight variant of get_module_tree without i18n processing.
    def get_tree(opts={})
      sp_hash = {:cols => [:id,:display_name,:component_template]}
      rows = get_objs(sp_hash)
      # all rows agree on everything but col
      ret = rows.first.reject{|k,v|k == :component}
      ret.merge!(:components => rows.map{|r|r[:component]})
      if opts[:include_file_assets]
        ret.merge!(:file_assets => self.class.get_indexed_asset_files([id_handle]))
      end
      ret
    end

  end
end
-
-
1
module DTK
  # Loads every top-level model file and binds all models to the DB handle.
  class ModelInit
    # NOTE(review): "link_def" appears twice in this list; `require` is
    # idempotent so it is harmless, but likely one entry was meant to be
    # something else (e.g. link_def_link) — confirm.
    toplevel_model_files = %w{model user user_group user_group_relation repo repo_user repo_remote repo_user_acl attribute attribute_override port port_link monitoring_item attribute_link node service_node_group node_group node_group_relation network component component_ref component_type_hierarchy assembly library region target task task_log task_event task_error data_source state_change search_object dependency component_order constraints violation layout component_database link_def link_def component_relation file_asset implementation project node_binding_ruleset dns component_title module_ref module_refs module workspace namespace node_bindings node_image node_image_attribute action_def}

    # each model file lives next to this file
    toplevel_model_files.each { |model_file|
      require File.expand_path(model_file, File.dirname(__FILE__))
    }

    # associate database handle DBInstance with all models
    # NOTE(review): comment says DBInstance but the constant used is
    # DBinstance — presumably DBinstance is the one defined; confirm.
    model_names = Model.initialize_all_models(DBinstance)
  end
end
-
-
-
# TODO: unify with view_def_processor
-
1
module XYZ
  # Persists UI layout definitions generated from field definitions.
  class Layout < Model

    # Persists layout_info as a layout row under parent_id_handle and returns
    # the new row's id.
    def self.save(parent_id_handle,layout_info)
      name = "foo" #TODO: stub
      hash = {
        :display_name => name
      }.merge(layout_info)
      create_hash = {:layout => {name => hash}}

      new_id = create_from_hash(parent_id_handle,create_hash).map{|x|x[:id]}.first
      new_id
    end

    # Builds a layout def from field_def for view_type and saves it.
    def self.create_and_save_from_field_def(parent_id_handle,field_def,view_type)
      layout_def = create_def_from_field_def(field_def,view_type)
      layout_info = {
        :def => layout_def,
        :type => view_type.to_s
      }
      save(parent_id_handle,layout_info)
    end

    # {:groups => [...]} layout definition derived from field_def.
    # NOTE(review): view_type is currently ignored (the dispatch on it is
    # commented out below) — every view type gets the edit-style grouping.
    def self.create_def_from_field_def(field_def,view_type)
=begin
      case view_type.to_s
       when "wspace-edit" then LayoutViewDefProcessor.layout_groups_from_field_def__edit(field_def)
       else raise Error.new("type #{view_type} is unexpected")
      end
=end
      groups = LayoutViewDefProcessor.layout_groups_from_field_def__edit(field_def)
      {:groups => groups}
    end
    private
    # NOTE(review): `private` has no effect on a nested module definition;
    # LayoutViewDefProcessor is still reachable as Layout::LayoutViewDefProcessor.

    # Pure helpers that translate field-definition elements into layout groups.
    module LayoutViewDefProcessor
      # Groups field elements by component id; each group carries a name,
      # i18n label, and the per-field edit widgets.
      def self.layout_groups_from_field_def__edit(field_def)
        indexed_groups = Hash.new
        field_def.each do |el|
          index = group_index(el)
          indexed_groups[index] ||= {
            :name => group_name(el),
            :num_cols =>1,
            :i18n => group_i18n(el),
            :fields => Array.new
          }
          indexed_groups[index][:fields] << field_list__edit(el)
        end
        indexed_groups.values
      end

      def self.group_index(el)
        el[:component_id]
      end
      # "node/component" label when the element carries a node name.
      def self.group_i18n(el)
        if el[:node_name]
          "#{el[:node_name]}/#{el[:component_i18n]}"
        else
          el[:component_i18n]
        end
      end

      # Identifier-safe version of the i18n label.
      def self.group_name(el)
        group_i18n(el).gsub(/[^A-Za-z0-9_]/,"_")
      end

      # Edit-widget descriptor for a single field element; :id/:override_name
      # are template expressions expanded later in the view layer.
      def self.field_list__edit(el)
        {:name => el[:name],
         :type => convert_type(el[:type]),
         :help => el[:description] || '',
         :rows => 1,
         :cols => 40,
         :id => "{%=component_id[:#{el[:name]}]%}",
         :override_name => "{%=component_id[:#{el[:name]}]%}"
        }
      end
      # Maps a model data type to a widget type, defaulting to "text".
      def self.convert_type(data_type)
        TypeConvert[data_type]||"text"
      end
      TypeConvert = {
        "string" => "text",
        "json" => "hash",
        "integer" => "integer"
      }
    end
  end
end
-
1
module XYZ
  # A library: a named container for assemblies, nodes, and component
  # templates. Provides the per-user "private" and shared "public" libraries.
  class Library < Model
    ### get methods

    # node_binding_ruleset rows belonging to this library, optionally ANDed
    # with an extra filter.
    def get_node_binding_rulesets(filter=nil)
      full_filter = [:eq,:library_library_id,id()]
      if filter
        full_filter = [:and,full_filter,filter]
      end
      sp_hash = {
        :cols => [:id,:group_id,:ref],
        :filter => full_filter
      }
      Model.get_objs(model_handle(:node_binding_ruleset),sp_hash,:keep_ref_cols => true)
    end

    ### end: get methods

    class << self
      # Idempotent create of the current user's private library, scoped to the
      # user's private group.
      def create_users_private_library?(model_handle)
        user_obj = CurrentSession.new.get_user_object()
        private_group_obj = user_obj.get_private_group()
        library_mh = model_handle.createMH(:model_name => :library, :group_id => private_group_obj[:id])
        username = user_obj[:username]
        ref = users_private_library_ref(username)
        lib_name = users_private_library_name(username)
        Model.create_from_row?(library_mh,ref,{:display_name => lib_name})
      end

      # Idempotent create of the shared public library.
      def create_public_library?(model_handle)
        ref = lib_name = public_library_name()
        Model.create_from_row?(model_handle,ref,{:display_name => lib_name})
      end

      # Fetches the private library row; defaults to the session user.
      def get_users_private_library(model_handle,username=nil)
        username ||= CurrentSession.new.get_username()
        sp_hash = {
          :cols => [:id,:display_name,:group_id],
          :filter => [:eq,:display_name,users_private_library_name(username)]
        }
        get_obj(model_handle,sp_hash)
      end

      # Fetches the public library row.
      def get_public_library(model_handle)
        sp_hash = {
          :cols => [:id,:display_name,:group_id],
          :filter => [:eq,:display_name,public_library_name()]
        }
        get_obj(model_handle,sp_hash)
      end

      def check_valid_id(model_handle,id)
        check_valid_id_default(model_handle,id)
      end

      def name_to_id(model_handle,name)
        name_to_id_default(model_handle,name)
      end

      private
      # Display name is the fixed string "private" for every user; only the
      # ref is per-user. (username param kept for interface symmetry.)
      def users_private_library_name(username)
        "private"
      end
      def users_private_library_ref(username)
        "private-#{username}"
      end

      def public_library_name()
        "public"
      end
    end

    # Lists this library's assemblies, nodes, or component templates, sorted
    # by display_name. Raises Error for any other `about` value.
    def info_about(about,opts={})
      case about
      when :assemblies
        filter = [:eq, :library_library_id, id()]
        Assembly::Template.list(model_handle(:component),:filter => filter)
      when :nodes
        filter = [:eq, :library_library_id, id()]
        Node::Template.list(model_handle,:filter => filter)
      when :components
        Component::Template.list(model_handle,:library_idh => id_handle())
      else
        raise Error.new("TODO: not implemented yet: processing of info_about(#{about})")
      end.sort{|a,b|a[:display_name] <=> b[:display_name]}
    end

    # Post-clone fixup: clears dynamic attributes on cloned components and
    # resets cloned child nodes (external refs, status, names).
    def clone_post_copy_hook(clone_copy_output,opts={})
      new_id_handle = clone_copy_output.id_handles.first
      # TODO: hack; this should be optained from clone_copy_output
      new_assembly_obj = new_id_handle.create_object().update_object!(:display_name)
      case new_id_handle[:model_name]
      when :component then clear_dynamic_attributes(new_id_handle,opts)
      end
      level = 1
      node_hash_list = clone_copy_output.get_children_object_info(level,:node)
      unless node_hash_list.empty?
        node_mh = new_id_handle.createMH(:node)
        clone_post_copy_hook__child_nodes(node_mh,node_hash_list,new_assembly_obj)
      end
    end
    private
    # Resets each cloned node row: strips the source instance id from the
    # external ref (retyping it ec2_image), clears operational status and
    # deployment flag, and prefixes the display name with the assembly name.
    def clone_post_copy_hook__child_nodes(node_mh,node_hash_list,new_assembly_obj)
      rows = node_hash_list.map do |r|
        ext_ref = r[:external_ref] && r[:external_ref].reject{|k,v|k == :instance_id}.merge(:type => "ec2_image")
        update_row = {
          :id => r[:id],
          :external_ref => ext_ref,
          :operational_status => nil,
          :is_deployed => false
        }
        assembly_name = new_assembly_obj[:display_name]
        update_row[:display_name] = "#{assembly_name}-#{r[:display_name]}" if assembly_name and r[:display_name]
        update_row
      end
      Model.update_from_rows(node_mh,rows)
    end

    # Clears dynamic node- and component-scoped attributes of the clone.
    def clear_dynamic_attributes(new_id_handle,opts)
      attrs_to_clear = get_dynamic_attributes(:node,new_id_handle) + get_dynamic_attributes(:component,new_id_handle)
      Attribute.clear_dynamic_attributes_and_their_dependents(attrs_to_clear,:add_state_changes => false)
    end
    private
    # returns attributes that will be cleared
    def get_dynamic_attributes(model_name,new_id_handle)
      if model_name == :component
        col = :node_assembly_parts_cmp_attrs
      elsif model_name == :node
        col = :node_assembly_parts_node_attrs
      else
        raise Error.new("unexpected model_name #{model_name}")
      end
      sp_hash = {
        :filter => [:and,[:eq, :id, new_id_handle.get_id()],[:eq, :type, "composite"]],
        :columns => [col]
      }
      cmp_mh = new_id_handle.createMH(:component)
      Model.get_objs(cmp_mh,sp_hash).map do |r|
        attr = r[:attribute]
        attr if attr[:dynamic]
      end.compact
    end
  end
end
-
1
module DTK
  # A LinkDef describes how a component may be linked to other components;
  # its child link_def_link rows describe concrete internal/external link shapes.
  class LinkDef < Model
    r8_nested_require('link_def','link')
    r8_nested_require('link_def','context')
    r8_nested_require('link_def','auto_complete')
    r8_nested_require('link_def','parse_serialized_form')
    r8_nested_require('link_def','info')
    extend ParseSerializedFormClassMixin

    # Default column set fetched for link_def rows.
    def self.common_columns()
      [:id,:group_id,:display_name,:description,:local_or_remote,:link_type,:required,:dangling,:has_external_link,:has_internal_link,:component_component_id]
    end

    # Returns the subset of dep_cmp_template's link defs having at least one
    # link_def_link whose remote_component_type equals antec_cmp_template's
    # component_type.
    # NOTE(review): "antecendent" misspelling is part of the public method name.
    def self.get_link_defs_matching_antecendent(dep_cmp_template,antec_cmp_template)
      ret = Array.new
      link_defs = get([dep_cmp_template.id_handle])
      return ret if link_defs.empty?
      link_def_idhs = link_defs.map{|ld|ld.id_handle()}
      antec_cmp_type = antec_cmp_template.get_field?(:component_type)
      matching_ld_links = get_link_def_links(link_def_idhs,:cols => [:link_def_id], :filter => [:eq,:remote_component_type,antec_cmp_type])
      matching_ld_ids = matching_ld_links.map{|ld_link|ld_link[:link_def_id]}
      # the select below (not ret) is the return value on the non-empty path
      link_defs.select{|ld|matching_ld_ids.include?(ld[:id])}
    end

    # Fetches link_def rows belonging to any of the given component template
    # id handles; returns [] for empty input.
    def self.get(component_template_idhs)
      ret = Array.new
      return ret if component_template_idhs.empty?()
      sp_hash = {
        :cols => common_columns(),
        :filter => [:oneof,:component_component_id,component_template_idhs.map{|idh|idh.get_id()}]
      }
      link_def_mh = component_template_idhs.first.createMH(:link_def)
      get_objs(link_def_mh,sp_hash)
    end

    # Fetches child link_def_link rows for the given link_def id handles.
    # opts[:filter] is AND-ed with the id-membership filter;
    # opts[:cols] overrides the default Link.common_columns.
    def self.get_link_def_links(link_def_idhs,opts={})
      ret = Array.new
      return ret if link_def_idhs.empty?
      filter = [:oneof,:link_def_id,link_def_idhs.map{|idh|idh.get_id()}]
      if opts[:filter]
        filter = [:and,filter,opts[:filter]]
      end
      sp_hash = {
        :cols => opts[:cols]||Link.common_columns(),
        :filter => filter
      }
      ld_link_mh = link_def_idhs.first.create_childMH(:link_def_link)
      get_objs(ld_link_mh,sp_hash)
    end

    # ports are augmented with link def under :link_def key
    # Collects possible connections between every unconnected port and the
    # output ports; pre-computes port info and link_def_links once up front.
    def self.find_possible_connections(unconnected_aug_ports,output_aug_ports)
      ret = Array.new
      output_aug_ports.each{|r|r.set_port_info!()}
      set_link_def_links!(unconnected_aug_ports)
      # tell the per-port call that the shared setup is already done
      opts = {:port_info_is_set=>true,:link_def_links_are_set=>true}
      unconnected_aug_ports.each do |unc_port|
        ret += unc_port[:link_def].find_possible_connection(unc_port,output_aug_ports,opts)
      end
      ret
    end
    # unc_aug_port and output_aug_ports have keys :node
    # Returns {:input_port,:output_port} match hashes for this link def's links.
    def find_possible_connection(unc_aug_port,output_aug_ports,opts={})
      ret = Array.new
      unless opts[:port_info_is_set]
        output_aug_ports.each{|r|r.set_port_info!()}
      end
      unless opts[:link_def_links_are_set]
        LinkDef.set_link_def_links!(unc_aug_port)
      end

      unc_aug_port.set_port_info!()
      (unc_aug_port[:link_def][:link_def_links]||[]).each do |ld_link|
        matches = ld_link.ret_matches(unc_aug_port,output_aug_ports)
        ret += matches
      end
      ret
    end

    # Side effect: populates [:link_def][:link_def_links] on each augmented
    # port (accepts one port or an array). Returns nil.
    def self.set_link_def_links!(aug_ports)
      aug_ports = [aug_ports] unless aug_ports.kind_of?(Array)
      # index the (shared) link_def objects by id so fetched links can be attached
      ndx_link_defs = aug_ports.inject(Hash.new) do |h,r|
        ld = r[:link_def]
        h.merge(ld[:id] => ld)
      end
      ld_link_cols = [:id,:group_id,:display_name,:type,:position,:remote_component_type,:link_def_id]
      ld_links = get_link_def_links(ndx_link_defs.values.map{|r|r.id_handle()},:cols => ld_link_cols)
      ld_links.each do |r|
        (ndx_link_defs[r[:link_def_id]][:link_def_links] ||= Array.new) << r
      end
      nil
    end

  end
end
-
-
2
module DTK; class LinkDef
  class AutoComplete
    # TODO: AUTO-COMPLETE-LINKS: this needs to be enhanced to be a general mechanism to auto complete links

    # Auto-creates "internal" attribute links for a component just added to a
    # node: finds unconnected local/internal link defs touching the component,
    # chooses a concrete link per the (stubbed) strategy, and persists the
    # resulting attribute links.
    def self.create_internal_links(node,component,node_link_defs_info)
      # get link_defs in node_link_defs_info that relate to internal links not linked already that connect to component
      # on either end. what is returned are link defs annotated with their possible links
      relevant_link_defs = get_annotated_internal_link_defs(component,node_link_defs_info)
      return if relevant_link_defs.empty?
      # for each link def with multiple possible link defs find the match;
      # TODO: find good mechanism to get user input if there is a choice such as whether it is internal or external
      # below is experimenting with passing in "strategy" object, which for example can indicate to make all "internal_external internal"
      strategy = {:internal_external_becomes_internal => true,:select_first => true}
      parent_idh = component.id_handle.get_parent_id_handle_with_auth_info()
      attr_links = Array.new
      relevant_link_defs.each do |link_def|
        if link_def_link = choose_internal_link(link_def,link_def[:possible_links],link_def[:component],strategy)
          link_def_context = LinkDef::Context.create(link_def_link,node_link_defs_info)
          link_def_link.attribute_mappings.each do |attr_mapping|
            attr_links << attr_mapping.ret_links__clone_if_needed(link_def_context).merge(:type => "internal")
          end
        end
      end
      AttributeLink.create_attribute_links(parent_idh,attr_links)
    end

    # NOTE(review): 'private' does not affect 'def self.' class methods; these
    # remain publicly callable (private_class_method would be needed).
    private
    # Picks one of possible_links per strategy and tags it with the base
    # component's type under :local_component_type. Returns nil when there are
    # no possible links.
    def self.choose_internal_link(link_def,possible_links,link_base_cmp,strategy)
      # TODO: mostly stubbed fn
      # TODO: need to check if has constraint
      ret = nil
      return ret if possible_links.empty?
      raise Error.new("only select_first stratagy currently implemented") unless strategy[:select_first]
      ret = possible_links.first
      if ret[:type] == "internal_external"
        # Bug fix: was 'stratagy[...]' — an undefined local ('strategy' is the
        # parameter) that blew up whenever an internal_external link was chosen.
        raise Error.new("only strategy internal_external_becomes_internal implemented") unless strategy[:internal_external_becomes_internal]
      end
      link_base_cmp.update_object!(:component_type)
      ret.merge(:local_component_type => link_base_cmp[:component_type])
    end

    # Returns link defs (annotated with :component and :possible_links) that
    # are local, internal-capable, and not yet connected, where a possible
    # child link either targets this component's type or hangs off one of this
    # component's own link defs.
    def self.get_annotated_internal_link_defs(component,node_link_defs_info)
      ret = Array.new
      # shortcut; no links to create if less than two internal ports
      return ret if node_link_defs_info.size < 2

      #### get relevant link def possible links
      # find all link def ids that can be internal, local, and not connected already
      component_id = component.id
      component_type = (component.update_object!(:component_type))[:component_type]
      relevant_link_def_ids = Array.new
      cmp_link_def_ids = Array.new # subset of above on this component
      ndx_relevant_link_defs = Hash.new # for splicing in possible_links TODO: see if more efficient to get possible_links
                                        # in initial call to get node_link_defs_info
      # these are the ones for which the possible links should be found
      node_link_defs_info.each do |r|
        port = r[:port]
        if port.nil?
          Log.info("TODO: Check if port.nil? is an error in .get_annotated_internal_link_defs")
          next
        end
        link_def = r[:link_def]
        component = r[:component]
        if %w{component_internal component_internal_external}.include?(port[:type]) and
            link_def[:local_or_remote] == "local" and
            not port[:connected]
          link_def_id = link_def[:id]
          relevant_link_def_ids << link_def_id
          ndx_relevant_link_defs[link_def_id] = link_def.merge(:component => component)
          cmp_link_def_ids << link_def_id if link_def[:component_component_id] == component_id
        end
      end
      return ret if relevant_link_def_ids.empty?

      # get relevant possible_link link defs; these are ones that
      # are children of relevant_link_def_ids and
      # internal_external have link_def_id in cmp_link_def_ids or remote_component_type == component_type
      sp_hash = {
        :cols => [:link_def_id, :remote_component_type,:position,:content,:type],
        :filter => [:and, [:oneof, :type, %w{internal internal_external}],
                    [:oneof, :link_def_id, relevant_link_def_ids],
                    [:or, [:eq,:remote_component_type,component_type],
                     [:oneof, :link_def_id,cmp_link_def_ids]]],
        :order_by => [{:field => :position, :order => "ASC"}]
      }
      poss_links = Model.get_objs(component.model_handle(:link_def_link),sp_hash)
      return ret if poss_links.empty?
      # splice in possible links
      poss_links.each do |poss_link|
        (ndx_relevant_link_defs[poss_link[:link_def_id]][:possible_links] ||= Array.new) << poss_link
      end

      # relevant link defs are ones that are in ndx_relevant_link_defs_info and have a possible link
      ret = ndx_relevant_link_defs.reject{|k,v|not v.has_key?(:possible_links)}.values
      ret
    end

  end
end; end
-
-
1
module DTK
  # Evaluation context for one link def link: resolves the link's local/remote
  # component types to concrete components/nodes and maps each term referenced
  # by the link's attribute mappings to a Value object.
  class LinkDef::Context
    r8_nested_require('context','term_mappings')
    r8_nested_require('context','node_mappings')
    r8_nested_require('context','value')

    def self.create(link,link_defs_info)
      new(link,link_defs_info)
    end

    def initialize(link,link_defs_info)
      @link = link
      @component_mappings = component_mappings(link_defs_info)
      @node_mappings = NodeMappings.create_from_component_mappings(@component_mappings)

      @component_attr_index = Hash.new
      # @term_mappings has element for each component, component attribute and node attribute
      @term_mappings = TermMappings.create_and_update_cmp_attr_index(
        @node_mappings,
        @component_attr_index,
        @link.attribute_mappings,
        @component_mappings)
      # these two functions set all the component and attribute refs populated above
      @term_mappings.set_components!(@link,@component_mappings)
      @term_mappings.set_attribute_values!(@link,link_defs_info,@node_mappings)
    end
    private :initialize

    # returns array of LinkDef::Link::AttributeMapping::Augmented
    def aug_attr_mappings__clone_if_needed(opts={})
      @link.attribute_mappings().inject(Array.new) do |ret,am|
        ret + am.aug_attr_mappings__clone_if_needed(self,opts)
      end
    end

    # Looks up the Value object registered for term_index, or nil.
    def find_attribute_object?(term_index)
      @term_mappings.find_attribute_object?(term_index)
    end

    def remote_node()
      @node_mappings.remote
    end
    def local_node()
      @node_mappings.local
    end

    def temporal_order()
      @link[:temporal_order]
    end

    # Registers a component created mid-processing (e.g. by an on_create event)
    # under component_type and refreshes attribute values that reference it.
    # NOTE(review): add_ref_component! and get_and_update_component_attributes!
    # are not defined on Context/TermMappings in this chunk (TermMappings has
    # add_component_ref!); confirm these resolve before relying on this path.
    def add_component_ref_and_value!(component_type,component)
      @term_mappings.add_ref_component!(component_type).set_component_value!(component)
      # update all attributes that ref this component
      cmp_id = component[:id]
      attrs_to_get = {cmp_id => {:component => component, :attribute_info => @component_attr_index[component_type]}}
      get_and_update_component_attributes!(attrs_to_get)
    end

    private
    # Resolves the link's local/remote component types against link_defs_info.
    def component_mappings(link_defs_info)
      local_cmp_type = @link[:local_component_type]
      local_cmp = get_component(local_cmp_type,link_defs_info)
      remote_cmp_type = @link[:remote_component_type]
      remote_cmp = get_component(remote_cmp_type,link_defs_info)
      {:local => local_cmp, :remote => remote_cmp}
    end

    # Returns the matching component, or nil (after logging an error).
    def get_component(component_type,link_defs_info)
      match = link_defs_info.find{|r|component_type == r[:component][:component_type]}
      unless ret = match && match[:component]
        Log.error("component of type #{component_type} not found in link_defs_info")
      end
      ret
    end
  end
end
-
1
module DTK
  class LinkDef::Context
    # Hash with :local and :remote node entries for a link; remote defaults to
    # local (i.e. an internal link).
    class NodeMappings < Hash
      def initialize(local,remote=nil)
        super()
        replace(:local => local, :remote => remote||local)
      end
      private :initialize

      # Looks up the nodes referenced by the component mappings. Node groups
      # are wrapped in a ServiceNodeGroup::Cache; plain nodes are swapped for
      # their linked target ref when one exists.
      def self.create_from_component_mappings(cmp_mappings)
        ndx_node_ids = cmp_mappings.inject({}){|h,(k,v)|h.merge(k => v[:node_node_id])}
        node_mh = cmp_mappings[:local].model_handle(:node)
        ndx_node_info = Hash.new
        Node::TargetRef.get_ndx_linked_target_refs(node_mh,ndx_node_ids.values.uniq).each_pair do |node_id,tr_info|
          node = tr_info.node
          ndx = node.id
          if node.is_node_group?
            node = ServiceNodeGroup::Cache.create_as(node,tr_info.target_refs)
          else
            # switch to pointing to target ref if it exists
            unless tr_info.target_refs.empty?
              if tr_info.target_refs.size > 1
                Log.error("Unexpected that tr_info.target_refs.size > 1")
              end
              node = tr_info.target_refs.first
            end
          end
          ndx_node_info.merge!(ndx => node)
        end
        new(ndx_node_info[ndx_node_ids[:local]],ndx_node_info[ndx_node_ids[:remote]])
      end

      # True when both endpoints resolve to the same node.
      def is_internal?()
        local[:id] == remote[:id]
      end
      # Returns [{:endpoint,:nodes}] for each endpoint that is a node group.
      def node_group_members()
        ret = Array.new
        if local.is_node_group?()
          ret << {:endpoint => :local, :nodes => local[:target_refs]}
        end
        if remote.is_node_group?()
          ret << {:endpoint => :remote, :nodes => remote[:target_refs]}
        end
        ret
      end
      def local()
        self[:local]
      end
      def remote()
        self[:remote]
      end
    end
  end
end
-
1
module DTK
  class LinkDef::Context
    # Maps term indexes (component types and attribute term indexes) to Value
    # objects, and batches db lookups to fill in their concrete values.
    class TermMappings < Hash
      def self.create_and_update_cmp_attr_index(node_mappings,component_attr_index,attribute_mappings,cmp_mappings)
        ret = TermMappings.new()
        ret.update_this_and_cmp_attr_index(node_mappings,component_attr_index,attribute_mappings,cmp_mappings)
      end
      # Populates this hash with component and attribute refs; returns self.
      # Side effect: fills component_attr_index (component_ref => attr info list).
      def update_this_and_cmp_attr_index(node_mappings,component_attr_index,attribute_mappings,cmp_mappings)
        add_component_refs!(cmp_mappings)
        add_attribute_refs!(node_mappings,component_attr_index,attribute_mappings)
        self
      end

      # Binds each component Value to the link's local/remote component.
      def set_components!(link,cmp_mappings)
        values.each do |v|
          v.set_component_remote_and_local_value!(link,cmp_mappings)
        end
      end

      # Loads attribute rows (component then node) and stores them on the
      # corresponding Value objects.
      def set_attribute_values!(link,link_defs_info,node_mappings)
        attrs_to_set = component_attributes_to_set()
        get_and_update_component_attributes!(attrs_to_set)

        attrs_to_set = node_attributes_to_set(node_mappings)
        get_and_update_node_attributes!(attrs_to_set)
      end

      def find_attribute_object?(term_index)
        self[term_index]
      end

      # attrs_to_set: {component_id => {:component =>, :attribute_info => [...]}}
      def get_and_update_component_attributes!(attrs_to_set)
        return if attrs_to_set.empty?
        from_db = Component.get_virtual_attributes__include_mixins(attrs_to_set,attribute_fields_to_get())
        attrs_to_set.each do |component_id,hash_val|
          next unless cmp_info = from_db[component_id]
          hash_val[:attribute_info].each do |a|
            attr_name = a[:attribute_name]
            a[:value_object].set_attribute_value!(cmp_info[attr_name]) if cmp_info.has_key?(attr_name)
          end
        end
      end

      # attrs_to_set: {node_id => {:node =>, :attribute_info => [...]}}
      def get_and_update_node_attributes!(attrs_to_set)
        return if attrs_to_set.empty?
        from_db = Node.get_virtual_attributes(attrs_to_set,attribute_fields_to_get())
        attrs_to_set.each do |node_id,hash_val|
          next unless node_info = from_db[node_id]
          hash_val[:attribute_info].each do |a|
            attr_name = a[:attribute_name]
            a[:value_object].set_attribute_value!(node_info[attr_name]) if node_info.has_key?(attr_name)
          end
        end
      end

      private
      def add_component_refs!(cmp_mappings)
        cmp_mappings.each_value{|cmp|add_component_ref!(cmp)}
      end

      # One ref per attribute-mapping endpoint (both input and output).
      def add_attribute_refs!(node_mappings,component_attr_index,attribute_mappings)
        attribute_mappings.each do |am|
          add_ref!(node_mappings,component_attr_index,am[:input])
          add_ref!(node_mappings,component_attr_index,am[:output])
        end
      end

      def add_component_ref!(component)
        component_type = component[:component_type]
        term_index = component_type
        self[term_index] ||= Value::Component.new(:component_type => component_type)
      end

      def attribute_fields_to_get()
        # TODO: prune which of these data type attributes needed; longer term is to clean them up to be normalized
        [:id,:value_derived,:value_asserted,:data_type,:semantic_data_type,:semantic_type,:semantic_type_summary]
      end

      # Groups ComponentAttribute values by component id for a batched fetch.
      def component_attributes_to_set()
        ret = Hash.new
        each_value do |v|
          if v.kind_of?(Value::ComponentAttribute)
            # v.component can be null if refers to component created by an event
            next unless cmp = v.component
            a = (ret[cmp[:id]] ||= {:component => cmp, :attribute_info => Array.new})[:attribute_info]
            a << {:attribute_name => v.attribute_ref.to_s, :value_object => v}
          end
        end
        ret
      end
      # Groups NodeAttribute values by node id for a batched fetch; logs and
      # skips any value whose node ref cannot be resolved.
      def node_attributes_to_set(node_mappings)
        ret = Hash.new
        each_value do |v|
          if v.kind_of?(Value::NodeAttribute)
            unless node = node_mappings[v.node_ref.to_sym]
              Log.error("cannot find node associated with node ref")
              next
            end
            a = (ret[node[:id]] ||= {:node => node, :attribute_info => Array.new})[:attribute_info]
            a << {:attribute_name => v.attribute_ref.to_s, :value_object => v}
          end
        end
        ret
      end

      def add_ref!(node_mappings,component_attr_index,term)
        # TODO: see if there can be name conflicts between different types in which may want to prefix with
        # type (type's initials, like CA for component attribute)
        term_index = term[:term_index]
        value = self[term_index] ||= Value.create(term,:node_mappings => node_mappings)
        value.update_component_attr_index!(component_attr_index)
      end

    end
  end
end
-
1
module DTK
  class LinkDef::Context
    # Base class for term values referenced by a link's attribute mappings;
    # subclasses: Component, ComponentAttribute, NodeAttribute.
    class Value
      r8_nested_require('value','component')
      r8_nested_require('value','attribute_mixin') # must be before component_attribute and node_attribute
      r8_nested_require('value','component_attribute')
      r8_nested_require('value','node_attribute')
      attr_reader :component
      # component_ref is the referenced component_type (nil for node attributes).
      def initialize(component_ref)
        @component_ref = component_ref
        @component = nil
      end

      # Factory: returns the Value subclass matching term[:type]; logs and
      # returns nil for an unrecognized type.
      def self.create(term,opts={})
        case term[:type].to_sym
          when :component
            Component.new(term)
          when :component_attribute
            ComponentAttribute.new(term,opts)
          when :node_attribute
            NodeAttribute.new(term,opts)
          else
            # Bug fix: previously interpolated the undefined local 'type'
            # (NameError on Ruby >= 1.9) instead of the offending term type.
            Log.error("unexpected type #{term[:type]}")
            nil
        end
      end

      # can be overwritten
      def is_node_attribute?()
        false
      end

      # can be overwritten
      def get_ng_member_attributes__clone_if_needed(opts={})
        Array.new
      end

      # Binds @component to the local or remote component whose type matches
      # @component_ref; no-op for node attributes (nil component_ref).
      def set_component_remote_and_local_value!(link,cmp_mappings)
        return if @component_ref.nil? # would fire if this is a NodeAttribute
        if @component_ref == link[:local_component_type]
          @component = cmp_mappings[:local]
        elsif @component_ref == link[:remote_component_type]
          @component = cmp_mappings[:remote]
        end
      end

      def set_component_value!(component)
        @component = component
      end

      # no op unless overwritten
      def update_component_attr_index!(component_attr_index)
      end
      # overwritten
      def value()
      end
    end
  end
end
-
2
module DTK; class LinkDef::Context
  class Value
    # Shared behavior for attribute-valued terms (component and node attributes):
    # holds the fetched attribute row and resolves the node it lives on.
    module AttributeMixin
      def set_attribute_value!(attribute)
        @attribute = attribute
      end
      def value()
        @attribute
      end
      def is_array?()
        @attribute[:semantic_type_object].is_array?()
      end
      # Memoized; ret_node is supplied by the including class.
      def node()
        @node ||= ret_node()
      end
      def on_node_group?
        node().is_node_group?()
      end
      # Returns the node, raising unless it is a node group.
      def service_node_group_cache()
        ret = node()
        unless ret.is_node_group?()
          # Fixed typo in user-facing error message ("Shoud" -> "Should")
          raise Error.new("Should not be called if not node group")
        end
        ret
      end
    end
  end
end; end
-
2
module DTK; class LinkDef::Context
  class Value
    # Term value for a whole component; its ref is the component_type and its
    # value is the component object bound later by set_component_*_value!.
    class Component < self
      def initialize(term)
        super(term[:component_type])
      end
      def value()
        @component
      end
    end
  end
end; end
-
2
module DTK; class LinkDef::Context
  class Value
    # Term value for an attribute on a component ("cmp_type.attr_name").
    class ComponentAttribute < self
      include AttributeMixin
      attr_reader :attribute_ref
      def initialize(term,opts={})
        super(term[:component_type])
        @attribute_ref = term[:attribute_name]
        @node_mappings = opts[:node_mappings]
      end

      # "node/component/attribute" form for error messages.
      def pp_form()
        attr = @attribute.get_field?(:display_name)
        cmp = @component.get_field?(:display_name)
        # NOTE(review): fetches field :node here while NodeAttribute#pp_form
        # fetches :display_name — confirm :node is intended and not a typo.
        node = node().get_field?(:node)
        "#{node}/#{cmp}/#{attr}"
      end

      # Registers this value object under its component ref so it can be
      # refreshed when the component is (re)bound.
      def update_component_attr_index!(component_attr_index)
        p = component_attr_index[@component_ref] ||= Array.new
        p << {:attribute_name => @attribute_ref, :value_object => self}
      end

      # this should only be called on a node group
      # it returns the associated attributes on the node group members
      def get_ng_member_attributes__clone_if_needed(opts={})
        node_group_attrs = service_node_group_cache().get_component_attributes(@component,opts)
        attr_name = @attribute.get_field?(:display_name)
        node_group_attrs.select{|a|a[:display_name] == attr_name}
      end

      private
      # The node this component sits on, matched by node_node_id.
      def ret_node()
        node_id = @component[:node_node_id]
        @node_mappings.values.find{|n|n[:id] == node_id}
      end
    end
  end
end; end
-
2
module DTK; class LinkDef::Context
  class Value
    # Term value for an attribute directly on a node ("node_ref.attr_name").
    class NodeAttribute < self
      include AttributeMixin
      attr_reader :attribute_ref,:node_ref
      def initialize(term,opts={})
        # no component ref: node attributes do not belong to a component
        super(nil)
        @node_ref = term[:node_name]
        @attribute_ref = term[:attribute_name]
        @node_mappings = opts[:node_mappings]
      end

      # "node/attribute" form for error messages.
      def pp_form()
        attr = @attribute.get_field?(:display_name)
        node = node().get_field?(:display_name)
        "#{node}/#{attr}"
      end

      def is_node_attribute?()
        true
      end

      # this should only be called on a node group
      # it returns the associated attributes on the node group members
      def get_ng_member_attributes__clone_if_needed(opts={})
        node_group_attrs = service_node_group_cache().get_node_attributes(opts)
        attr_name = @attribute.get_field?(:display_name)
        node_group_attrs.select{|a|a[:display_name] == attr_name}
      end

      private
      # Resolves the node ref (:local/:remote) against the node mappings.
      def ret_node()
        @node_mappings[@node_ref.to_sym]
      end
    end
  end
end; end
-
1
module DTK
  class LinkDef
    # Each element has form
    # <Assemby::Template>
    #   id: ID
    #   node: NODE
    #   component_ref: ComponentRef
    #   nested_component: ComponentTemplate
    #   link_def:
    #     <LinkDef>
    #     link_def_links:
    #     - LinkDef::Link
    class Info < Array
      def self.component_ref_cols()
        ComponentRef.common_cols()
      end
      def self.nested_component_cols()
        [:id,:display_name,:component_type, :extended_base, :implementation_id, :node_node_id,:only_one_per_node]
      end

      # Builds an Info for the assembly template and attaches link_def_links.
      def self.get_link_def_info(assembly_template)
        link_defs_info = new(assembly_template.get_objs(:cols => [:template_link_defs_info]))
        link_defs_info.add_link_def_links!()
      end

      # Fetches link_def_link rows and attaches each under its link_def's
      # :link_def_links key; returns self.
      # NOTE(review): the merge-based index keeps only the last fetched link per
      # link_def_id, so at most one link is attached per link def — confirm
      # whether multiple links per link def should be supported here.
      def add_link_def_links!()
        link_defs = link_defs()
        return self if link_defs.empty?()
        sp_hash = {
          :cols => [:id,:group_id,:link_def_id,:remote_component_type],
          :filter => [:oneof, :link_def_id, link_defs.map{|ld|ld[:id]}]
        }
        link_def_link_mh = link_defs.first.model_handle(:link_def_link)
        ndx_link_def_links = Model.get_objs(link_def_link_mh,sp_hash).inject(Hash.new) do |h,r|
          h.merge(r[:link_def_id] => r)
        end

        link_defs.each do |link_def|
          if link = ndx_link_def_links[link_def[:id]]
            (link_def[:link_def_links] ||= Array.new) << link
          end
        end
        self
      end

      # signature generate_link_def_link_pairs do |link_def,link|
      # Yields each (link_def, link) pair, visiting each link def id only once.
      def generate_link_def_link_pairs(&body)
        ndx_ld_links_mark = Hash.new
        link_defs().each do |link_def|
          ndx = link_def[:id]
          unless ndx_ld_links_mark[ndx]
            ndx_ld_links_mark[ndx] = true
            # Fix: default was '{}' although :link_def_links is stored as an
            # Array (see add_link_def_links!); a Hash default would have yielded
            # [key, value] pairs had it ever been non-empty.
            (link_def[:link_def_links]||[]).each{|link|body.call(link_def,link)}
          end
        end
      end

      # Collects the non-nil :link_def entries from this Info's elements.
      def link_defs()
        ret = Array.new
        each do |ld_info|
          if link_def = ld_info[:link_def]
            ret << link_def
          end
        end
        ret
      end

    end
  end
end
-
-
2
module DTK; class LinkDef
  # A concrete link shape under a link def; carries attribute mappings and
  # optional on-create events in its :content column.
  class Link < Model
    r8_nested_require('link','attribute_mapping')

    # Default column set fetched for link_def_link rows.
    def self.common_columns()
      [:id,:group_id,:display_name,:remote_component_type,:position,:content,:type,:temporal_order]
    end

    # Returns the first attribute mapping matching both patterns, or nil.
    def matching_attribute_mapping?(dep_attr_pattern,antec_attr_pattern)
      attribute_mappings().each do |am|
        if ret = am.match_attribute_patterns?(dep_attr_pattern,antec_attr_pattern)
          return ret
        end
      end
      nil
    end

    # Parses and appends a serialized attribute mapping, persisting the result;
    # returns self.
    def add_attribute_mapping!(am_serialized_form)
      updated_attr_mappings = attribute_mappings() + [LinkDef.parse_serialized_form_attribute_mapping(am_serialized_form)]
      update_attribute_mappings!(updated_attr_mappings)
      self
    end

    # TODO: when add cardinality constraints on links, would check it here
    # assuming that augmented ports have :port_info
    # Returns {:input_port,:output_port} pairs whose component types match this
    # link's remote type and whose node relation agrees with self[:type]
    # ("external" => different nodes, "internal" => same node).
    def ret_matches(in_aug_port,out_aug_ports)
      ret = Array.new
      cmp_type = self[:remote_component_type]
      out_aug_ports.each do |out_port|
        if out_port[:port_info][:component_type] == cmp_type
          match =
            case self[:type]
              when "external"
                in_aug_port[:node_node_id] != out_port[:node_node_id]
              when "internal"
                in_aug_port[:node_node_id] == out_port[:node_node_id]
              else
                raise Error.new("unexpected type for LinkDef::Link object")
            end
          if match
            ret << {:input_port => in_aug_port,:output_port => out_port}
          end
        end
      end
      ret
    end

    # Persists rows parsed from serialized possible_links, assigning 1-based
    # :position and the parent :link_def_id.
    # NOTE(review): model_handle is called receiverless in a class method, and
    # parse_possible_links is not visible in this chunk — presumably both come
    # from Model / the parse-serialized-form code; confirm.
    def self.create_from_serialized_form(link_def_idh,possible_links)
      rows = parse_possible_links(possible_links)
      link_def_id = link_def_idh.get_id()
      rows.each_with_index do |r,i|
        r[:position] = i+1
        r[:link_def_id] = link_def_id
      end
      create_from_rows(model_handle,rows)
    end

    # creates attribute links and can clone if needed attributes on a service node group to its members

    # Replaces the in-memory attribute mappings and persists them under
    # :content; returns the new mappings.
    def update_attribute_mappings!(new_attribute_mappings)
      ret = self[:attribute_mappings] = new_attribute_mappings
      self[:content] ||= Hash.new
      self[:content][:attribute_mappings] = ret
      update({:content => self[:content]},:convert => true)
      ret
    end

    # Lazily reifies attribute mappings from :content.
    def attribute_mappings()
      # TODO: may convert to using @attribute_mappings; need to make sure no side-effects
      self[:attribute_mappings] ||= (self[:content][:attribute_mappings]||[]).map{|am|AttributeMapping.reify(am)}
    end

    # Lazily builds Event objects from :content's :on_create events.
    def on_create_events()
      self[:on_create_events]||= ((self[:content][:events]||{})[:on_create]||[]).map{|ev|Event.create(ev,self)}
    end

    # Base class for link-creation events stored in link content.
    class Event < HashObject
      def self.create(event,link_def_link)
        case event[:event_type]
          when "extend_component" then EventExtendComponent.new(event,link_def_link)
          else
            raise Error.new("unexpecetd event type")
        end
      end
      def process!(context)
        raise Error.new("Needs to be overwritten")
      end
    end

    # Event that clones a component extension onto a node when a link is created.
    class EventExtendComponent < Event
      def initialize(event,link_def_link)
        # pick the base component type from the remote or local side of the link
        base_cmp = link_def_link[event[:node] == "remote" ? :remote_component_type : :local_component_type]
        super(event.merge(:base_component => base_cmp))
      end

      # NOTE(review): the first statement raises unconditionally ("deprecated"),
      # so everything after it is currently unreachable dead code.
      def process!(context)
        raise Error.new("deprecated context.find_component")
        # base_component = context.find_component(self[:base_component])
        raise Error.new("cannot find component with ref #{self[:base_component]} in context") unless base_component
        component_extension = base_component.get_extension_in_library(self[:extension_type])
        raise Error.new("cannot find library extension of type #{self[:extension_type]} to #{self[:base_component]} in library") unless component_extension

        # find node to clone it into
        node = (self[:node] == "local") ? context.local_node : context.remote_node
        raise Error.new("cannot find node of type #{self[:node]} in context") unless node

        # clone component into node
        override_attrs = {:from_on_create_event => true}
        # TODO: may put in flags to tell clone operation not to do any constraint checking
        clone_opts = {:ret_new_obj_with_cols => [:id,:display_name,:extended_base,:implementation_id]}
        new_cmp = node.clone_into(component_extension,override_attrs,clone_opts)

        # if alias is given, update context to reflect this
        if self[:alias]
          context.add_component_ref_and_value!(self[:alias],new_cmp)
        end
      end

      private
      # Validates the event hash's :node and :extension_type fields.
      def validate_top_level(hash)
        raise Error.new("node is set incorrectly") if hash[:node] and not [:local,:remote].include?(hash[:node].to_sym)
        raise Error.new("no extension_type is given") unless hash[:extension_type]
      end
    end
  end
end; end
-
1
module DTK
  class LinkDef::Link
    # One input->output attribute mapping on a link; stored as a hash with
    # :input and :output endpoint descriptors.
    class AttributeMapping < HashObject
      r8_nested_require('attribute_mapping','node_group_processor')
      r8_nested_require('attribute_mapping','augmented')
      r8_nested_require('attribute_mapping','parse_helper')

      # Coerces a Hash into an AttributeMapping; passes AttributeMappings through.
      def self.reify(object)
        if object.kind_of?(AttributeMapping)
          object
        elsif object.kind_of?(Hash)
          new(object)
        else
          raise Error.new("Unexpected object type (#{object.class})")
        end
      end

      # Resolves both endpoints against the context, then delegates to
      # NodeGroupProcessor to build Augmented mappings (cloning node-group
      # member attributes if needed). Missing endpoints raise when
      # opts[:raise_error], otherwise are logged and yield [].
      def aug_attr_mappings__clone_if_needed(link_def_context,opts={})
        ret = Array.new
        err_msgs = Array.new
        input_attr_obj,input_path = get_context_attr_obj_with_path(err_msgs,:input,link_def_context)
        output_attr_obj,output_path = get_context_attr_obj_with_path(err_msgs,:output,link_def_context)
        unless err_msgs.empty?
          err_msg = err_msgs.join(" and ").capitalize
          if opts[:raise_error]
            raise ErrorUsage.new(err_msg)
          else
            Log.error(err_msg)
            return ret
          end
        end

        attr_and_path_info = {
          :input_attr_obj => input_attr_obj,
          :input_path => input_path,
          :output_attr_obj => output_attr_obj,
          :output_path => output_path
        }
        NodeGroupProcessor.aug_attr_mappings__clone_if_needed(self,link_def_context,attr_and_path_info,opts)
      end

      # returns a hash with args if this is a function that takes args
      def parse_function_with_args?()
        ParseHelper::VarEmbeddedInText.isa?(self) # || other ones we add
      end

      # Returns self when both endpoints match the given patterns, else nil.
      def match_attribute_patterns?(dep_attr_pattern,antec_attr_pattern)
        if dep_attr_pattern.match_attribute_mapping_endpoint?(self[:input]) and
            antec_attr_pattern.match_attribute_mapping_endpoint?(self[:output])
          self
        end
      end

      private
      # returns [attribute_object,unravel_path] and updates error if any error
      def get_context_attr_obj_with_path(err_msgs,dir,context)
        attr_object = context.find_attribute_object?(self[dir][:term_index])
        unless attr_object && attr_object.value
          err_msg =
            if attr_pp_form = pp_form(dir)
              "attribute matching link def term (#{attr_pp_form}) does not exist"
            else
              Log.error("unexpected that have no pp form for: #{inspect}")
              "attribute matching link def term does not exist"
            end
          err_msgs << err_msg
        end
        index_map_path = self[dir][:path]
        # TODO: if treat :create_component_index need to put in here process_unravel_path and process_create_component_index (from link_defs.rb)
        [attr_object,index_map_path && AttributeLink::IndexMapPath.create_from_array(index_map_path)]
      end

      # Pretty-prints an endpoint (:input/:output) as "cmp_type.attr" or
      # "node.attr"; nil when the endpoint lacks the needed keys.
      def pp_form(direction)
        if attr = self[direction]
          if attr_name = attr[:attribute_name]
            if cmp_type = attr[:component_type]
              # meaning that it is a component attribute ref
              "#{Component.component_type_print_form(cmp_type)}.#{attr_name}"
            elsif attr[:node_name]
              "node.#{attr_name}"
            end
          end
        end
      end

    end
  end
end
-
-
2
module DTK; class LinkDef::Link
  class AttributeMapping
    # attribute mapping augmented with context: carries the resolved input and
    # output attribute ids (and optional index-map paths) as hash entries while
    # delegating parse queries to the underlying attribute mapping.
    class Augmented < Hash
      def initialize(attribute_mapping,input_attr,input_path,output_attr,output_path)
        super()
        @attribute_mapping = attribute_mapping
        self[:input_id] = input_attr.id
        self[:output_id] = output_attr.id
        # paths are optional; only record them when present
        self[:input_path] = input_path if input_path
        self[:output_path] = output_path if output_path
      end

      def parse_function_with_args?()
        @attribute_mapping.parse_function_with_args?()
      end
    end
  end
end; end
-
-
2
module DTK; class LinkDef::Link
  class AttributeMapping
    # processes service node groups when needed
    class NodeGroupProcessor
      attr_reader :attribute_mapping,:input_path,:output_path
      def initialize(attribute_mapping,link_def_context,attr_and_path_info,opts={})
        @attribute_mapping = attribute_mapping
        @link_def_context = link_def_context
        info = attr_and_path_info # for succinctness
        @input_attr_obj = info[:input_attr_obj]
        @input_path = info[:input_path]
        @output_attr_obj = info[:output_attr_obj]
        @output_path = info[:output_path]
      end
      private :initialize
      def input_attr()
        @input_attr_obj.value()
      end
      def output_attr()
        @output_attr_obj.value()
      end

      # returns Array of Augmented (AttributeMapping) objects
      # clones component and their attributes from a node group if needed
      def self.aug_attr_mappings__clone_if_needed(attribute_mapping,link_def_context,attr_and_path_info,opts={})
        new(attribute_mapping,link_def_context,attr_and_path_info,opts).aug_attr_mappings__clone_if_needed(opts)
      end

      # One Augmented mapping per node-group member when cloning is needed,
      # otherwise a single Augmented mapping. opts[:port_link_idh] (if given)
      # is recorded on each result as :port_link_id.
      def aug_attr_mappings__clone_if_needed(opts={})
        ret = Array.new
        input_attr = input_attr()
        port_link_id = opts[:port_link_idh] && opts[:port_link_idh].get_id()
        if cloning_node_group_members_needed?()
          node_group_attrs = @output_attr_obj.get_ng_member_attributes__clone_if_needed(opts)
          node_group_attrs.each do |output_attr|
            ret << ret_single_link(input_attr,output_attr,port_link_id)
          end
        else
          ret << ret_single_link(input_attr,output_attr(),port_link_id)
        end
        ret
      end

      private
      # Wraps one input/output attribute pair in an Augmented mapping.
      def ret_single_link(input_attr,output_attr,port_link_id=nil)
        ret = Augmented.new(@attribute_mapping,input_attr,@input_path,output_attr,@output_path)
        if port_link_id
          ret.merge!(:port_link_id => port_link_id)
        end
        ret
      end

      # True only when the OUTPUT side alone is on a node group; raises on
      # unsupported combinations (both sides on groups; array attribute on a
      # group without a path; group node attribute into a non-array input).
      def cloning_node_group_members_needed?()
        num_ngs = [@input_attr_obj.node,@output_attr_obj.node].inject(0){|r,n|r +(n.is_node_group? ? 1 : 0)}
        if num_ngs == 0
          return nil
        elsif num_ngs == 2
          raise ErrorUsage.new("Not treating links between components that are both on node groups")
        end
        # determine if this manifests as single of multiple links; if single link just pass nil
        # when this is called there is one node group and one node
        return nil if @input_attr_obj.on_node_group?()

        # if reach here @output_attr_obj.on_node_group?
        if @output_attr_obj.is_array?() and @output_path.nil?
          raise ErrorUsage.new("Not treating attribute mappings from an array attribute on a node group (#{@output_attr_obj.pp_form()})")
        end
        if @output_attr_obj.is_node_attribute?() and !@input_attr_obj.is_array?()
          raise ErrorUsage.new("Node attributes on node groups (#{@output_attr_obj.pp_form()}) must connect to an array attribute, not '#{@input_attr_obj.pp_form()}'")
        end
        true
      end

    end
  end
end; end
-
-
3
module DTK; class LinkDef::Link; class AttributeMapping
  module ParseHelper
    # Detects attribute-mapping output terms whose last segment embeds a
    # variable within surrounding literal text.
    module VarEmbeddedInText
      # Returns a descriptor hash when am's output term index matches the
      # var-embedded-in-text pattern; nil otherwise.
      def self.isa?(am)
        term_index = (am[:output] || {})[:term_index]
        return unless term_index
        last_segment = term_index.split('.').last
        return unless last_segment
        # example abc${output_var}def -> text_parts == ["abc", "def"]
        match = /(^[^\$]*)\$\{[^\}]+\}(.*$)/.match(last_segment)
        return unless match
        {
          :name => :var_embedded_in_text,
          :constants => { :text_parts => [match[1], match[2]] }
        }
      end
    end
  end
end; end; end
-
2
module DTK; class LinkDef
-
1
# Class-level helpers that delegate parsing of serialized (DSL form) link
# defs to ParseSerializedForm.
module ParseSerializedFormClassMixin
  def parse_serialized_form_local(link_defs, config_agent_type, remote_link_defs, local_cmp_ref = nil)
    parser = ParseSerializedForm.new(config_agent_type, remote_link_defs, local_cmp_ref)
    parser.parse(link_defs)
  end

  # This is used to convert from ad hoc link commands into internal form.
  def parse_serialized_form_attribute_mapping(mapping)
    ParseSerializedForm.new.parse_possible_link_attribute_mapping(mapping)
  end

  def parse_from_create_dependency(link_def)
    ParseSerializedForm.new.parse([link_def])
  end
end
-
-
1
# Parses link defs from their serialized (hash) DSL form into the internal
# representation. As a side effect it accumulates the remote-side view of each
# link def into the remote_link_defs hash handed to the constructor.
class ParseSerializedForm
  attr_reader :config_agent_type, :remote_link_defs, :local_cmp_ref

  def initialize(config_agent_type = nil, remote_link_defs = {}, local_cmp_ref = nil)
    @config_agent_type = config_agent_type
    @remote_link_defs  = remote_link_defs
    @local_cmp_ref     = local_cmp_ref
  end

  # Returns a hash indexed by "local_<type>" refs, one entry per link def.
  def parse(link_defs)
    link_defs.each_with_object({}) do |link_def, ndx_ret|
      link_def_type = link_def["type"]
      ref = "local_#{link_def_type}"
      # defaults that parse_possible_links_local may overwrite (mutated there)
      internal_external_flags = {
        :has_internal_link => false,
        :has_external_link => false
      }
      possible_links = parse_possible_links_local(link_def["possible_links"], link_def_type, internal_external_flags)
      el = {
        :display_name => ref,
        :local_or_remote => "local",
        :link_type => link_def_type,
        :link_def_link => possible_links
      }.merge(internal_external_flags)
      el[:required]    = link_def["required"]    if link_def.has_key?("required")
      el[:description] = link_def["description"] if link_def.has_key?("description")
      ndx_ret[ref] = el
    end
  end

  private

  # Builds the per-possible-link entries; side effects: records remote link
  # defs and flips the internal/external flags passed in.
  def parse_possible_links_local(possible_links, link_def_type, internal_external_flags)
    ndx_ret = {}
    possible_links.each_with_index do |possible_link, i|
      remote_component_type = possible_link.keys.first
      possible_link_info = possible_link.values.first
      possible_link_type = possible_link_info["type"]

      add_remote_link_def?(remote_link_defs, remote_component_type, link_def_type, possible_link_type)

      internal_external_flags[:has_internal_link] = true if %w{internal internal_external}.include?(possible_link_type)
      internal_external_flags[:has_external_link] = true if %w{external internal_external}.include?(possible_link_type)

      position = i + 1
      # position suffix keeps the ref unique when the same remote type repeats
      ref = "#{remote_component_type}___#{position}"
      el = {
        :display_name => remote_component_type,
        :remote_component_type => remote_component_type,
        :position => position,
        :content => parse_possible_link_content(possible_link_info),
        :type => possible_link_type
      }
      if order = possible_link_info["order"]
        el[:temporal_order] = order
      end
      ndx_ret[ref] = el
    end
    ndx_ret
  end

  # Accumulates the remote-side view of this link def into remote_link_defs
  # (mutated in place); despite the '?' name it is used for its side effect.
  def add_remote_link_def?(remote_link_defs, remote_component_type, link_def_type, possible_link_type)
    remote_defs = (remote_link_defs[remote_component_type] ||= Hash.new)
    ref = "remote_#{link_def_type}"
    entry = (remote_defs[ref] ||= {
      :display_name => ref,
      :config_agent_type => config_agent_type,
      :local_or_remote => "remote",
      :link_type => link_def_type,
    })
    entry[:has_internal_link] = true if %w{internal internal_external}.include?(possible_link_type)
    entry[:has_external_link] = true if %w{external internal_external}.include?(possible_link_type)
    entry[:local_cmp_ref] = local_cmp_ref if local_cmp_ref && entry[:has_external_link]
  end

  # Parses the events and attribute mappings carried by a possible link.
  def parse_possible_link_content(possible_link_info)
    ret = Hash.new
    (possible_link_info["events"] || []).each do |evs|
      if parsed_evs = parse_possible_link_on_create_events(evs)
        (ret[:events] ||= Hash.new).merge!(parsed_evs)
      end
    end
    attribute_mappings = possible_link_info["attribute_mappings"] || []
    unless attribute_mappings.empty?
      ret[:attribute_mappings] = attribute_mappings.map { |am| parse_possible_link_attribute_mapping(am) }
    end
    ret
  end

  # Serialized form is a one-pair hash: { output_term => input_term }.
  def parse_possible_link_attribute_mapping(mapping)
    {
      :output => parse_attribute_term(mapping.keys.first),
      :input  => parse_attribute_term(mapping.values.first)
    }
  end
  public :parse_possible_link_attribute_mapping

  # Only the "on_create_link" trigger is recognized; anything else is logged
  # and ignored (returns nil).
  def parse_possible_link_on_create_events(events)
    trigger = events.first
    if trigger == "on_create_link"
      { :on_create => parse_events(events[1]) }
    else
      Log.error("unexpected event trigger: #{trigger}")
      nil
    end
  end

  def parse_events(events)
    events.each_with_object(Array.new) do |ev, parsed|
      ev_type, ev_content = ev.first
      if ev_type == "extend_component"
        parsed_ev = parse_event_extend_component(ev_content)
        parsed << parsed_ev if parsed_ev
      else
        Log.error("unexpected event type: #{ev_type}")
      end
    end
  end

  def parse_event_extend_component(ev_content)
    # NOTE(review): :node is looked up with a symbol key while the other keys
    # are strings — confirm this asymmetry is intentional
    ret = {
      :event_type => "extend_component",
      :node => ev_content[:node] || :remote,
      :extension_type => ev_content["extension_type"],
    }
    ret[:alias] = ev_content["alias"] if ev_content.has_key?("alias")
    ret
  end

  # Parses a term like "<component>.<attr>[.<path>...]" or
  # "(local|remote)_node.<attr>[.<path>...]". Returns a hash carrying :type
  # plus :node_name xor :component_type, :attribute_name and optional :path;
  # raises Error on any unexpected form.
  def parse_attribute_term(term_x)
    ret = Hash.new
    term = term_x.to_s.gsub(/^:/, "")
    ret[:term_index] = term
    parts = term.split(SplitPat)

    if parts[0] =~ NodeTermRE
      ret[:type] = "node_attribute"
      # capture is just "local"/"remote", not the full "<x>_node" token
      ret[:node_name] = Regexp.last_match(1)
    elsif parts[0] =~ ComponentTermRE
      ret[:type] = "component_attribute"
      ret[:component_type] = Regexp.last_match(1)
    else
      raise Error.new("unexpected form (#{term_x.inspect})")
    end

    raise Error.new("unexpected form (#{term_x.inspect})") if parts.size < 2

    if parts[1] =~ AttributeTermRE
      ret[:attribute_name] = Regexp.last_match(1)
    elsif parts[1] =~ /\$\{(.+)\}/
      # variable reference form, e.g. "${foo}"
      ret[:attribute_name] = Regexp.last_match(1)
    else
      raise Error.new("unexpected form (#{term_x.inspect})")
    end

    ret[:path] = parts[2..-1] if parts.size > 2
    ret
  end

  SimpleTokenPat = 'a-zA-Z0-9_-'
  SplitPat = '.'

  NodeTermRE = Regexp.new("^(local|remote)_node$")
  ComponentTermRE = Regexp.new("^([#{SimpleTokenPat}]+$)")
  AttributeTermRE = Regexp.new("^([#{SimpleTokenPat}]+$)")
end
-
end; end
-
-
#################
-
# TBD: will separate into separate files
-
# TBD: looks like much shared on relationship between Library, Project, and Deployment (they are all containers; so might refactor as one object that has type)
-
-
# TODO: whatever is going on here, very unclear why ClassMixinDataSourceExtensions is def inside of Model.rb
-
-
1
module XYZ
-
1
# Mixin that lazily loads and caches the data-source adapter class matching
# the model class it is mixed into.
module ClassMixinDataSourceExtensions
  # class-variable cache deliberately shared across all classes using this
  # mixin: { model_class => { ds_type => adapter_class } }
  @@ds_class_objects ||= Hash.new

  # Returns (loading and memoizing on first use) the DSAdapter class for
  # ds_type that corresponds to this model class.
  def ds_class_object(ds_type)
    cache = (@@ds_class_objects[self] ||= Hash.new)
    cache[ds_type] ||= begin
      obj_class = Aux.demodulize(self.to_s)
      obj_type  = Aux.underscore(obj_class)
      # adapter files live next to this file, e.g. "<type>/<ds>_<type>.rb"
      require File.expand_path("#{obj_type}/#{ds_type}_#{obj_type}", File.dirname(__FILE__))
      DSAdapter.const_get(ds_type.to_s.capitalize).const_get(obj_class)
    end
  end
end
-
-
# TODO: why is this called object?
-
# TBD: re-examine whether current scheme is best way to implement relationship between model top, specfic model classes and the XYZ::model utility class
-
1
# TODO: why is this called object?
# TBD: re-examine whether current scheme is best way to implement relationship between model top, specific model classes and the XYZ::model utility class
class Object < Model
  set_relation_as_top()
  class << self
    #### Actions

    # Deletes the instance if it exists (idempotent).
    def delete(id_handle, opts = {})
      delete_instance(id_handle, opts) if exists?(id_handle)
    end

    # Creates a simple instance keyed by uri (idempotent).
    def create_simple(new_uri, c, opts = {})
      create_simple_instance?(IDHandle[:uri => new_uri, :c => c], opts)
    end

    # TODO: rewrite using join query

    # Collects attribute ids from all components and nodes under the parent.
    def get_contained_attribute_ids(id_handle, opts = {})
      parent_id = IDInfoTable.get_id_from_id_handle(id_handle)
      components = get_objects(ModelHandle.new(:c, :component), nil, :parent_id => parent_id) || []
      nodes      = get_objects(ModelHandle.new(:c, :node), nil, :parent_id => parent_id) || []
      (components.map { |cmp| cmp.get_contained_attribute_ids(opts) } +
        nodes.map { |node| node.get_contained_attribute_ids(opts) }).flatten
    end

    # TODO: this seems like generic function but specifically works with nodes?
    # type can be :asserted, :derived or :value
    # Returns { :component => {ref => values}, :node => {ref => values} },
    # omitting either key when there are no values of that kind.
    def get_contained_attributes(type, id_handle, opts = {})
      parent_id = IDInfoTable.get_id_from_id_handle(id_handle)
      components = get_objects(ModelHandle.new(:c, :component), nil, :parent_id => parent_id)
      nodes      = get_objects(ModelHandle.new(:c, :node), nil, :parent_id => parent_id)

      ret = {}
      components.each do |cmp|
        values = cmp.get_contained_attribute_values(type, opts)
        next unless values
        (ret[:component] ||= {})[cmp.get_qualified_ref.to_sym] = values
      end
      nodes.each do |node|
        values = node.get_direct_attribute_values(type, opts)
        next unless values
        (ret[:node] ||= {})[node.get_qualified_ref.to_sym] = values
      end
      ret
    end

    # TBD: temp
    def get_guid(id_handle)
      id_info = IDInfoTable.get_row_from_id_handle(id_handle)
      { :guid => IDInfoTable.ret_guid_from_id_info(id_info) }
    end
  end
end
-
end
-
-
-
-
# TODO: need to cleanup breaking into base_module, component_module, service_module and the DSL related classes
-
# There is overlap between some service module and other module code
-
# Right now seems intuitive model is that we have
-
# two types of modules: service module and the rest, the prime being the component module, and that for the rest there is much similarity
-
# for the rest the classes used are
-
1
module DTK
  # order is important
  [
    'mixins',
    'dsl_parser',
    'external_dependencies',
    'module_dsl_info', # TODO: this will get deprecated when all move over to update_module_output
    'update_module_output',
    'base_module',
    'component_module',
    'service',
    'test',
    'node',
    'branch',
    'version',
    'assembly_module'
  ].each { |file| r8_nested_require('module', file) }
end
-
1
module DTK
  # Base class for assembly-scoped module operations; the nested Component and
  # Service subclasses handle the two module flavors.
  class AssemblyModule
    extend Aux::CommonClassMixin
    r8_nested_require('assembly_module', 'component')
    r8_nested_require('assembly_module', 'service')

    def initialize(assembly)
      @assembly = assembly
    end

    # Deletes both the component and service modules tied to the assembly.
    def self.delete_modules?(assembly, opts = {})
      Component.new(assembly).delete_modules?
      Service.new(assembly).delete_module?(opts)
    end

    private

    # NOTE(review): `private` does not apply to `def self.` definitions, so the
    # class method below is effectively public — confirm whether intended.
    def self.assembly_module_version(assembly)
      ModuleVersion.ret(assembly)
    end

    # Instance-side convenience; falls back to @assembly when no argument given.
    def assembly_module_version(assembly = nil)
      assembly ||= @assembly
      raise Error.new("@assembly should not be null") unless assembly
      self.class.assembly_module_version(assembly)
    end
  end
end
-
2
module DTK; class AssemblyModule
-
1
# Assembly-scoped operations over component modules: creating per-assembly
# edit branches, finalizing edits, promoting changes and deleting the
# assembly-specific module versions.
class Component < self
  r8_nested_require('component','ad_hoc_link')
  r8_nested_require('component','attribute')
  r8_nested_require('component','get_for_assembly')

  # Ensures an assembly-specific branch of component_module exists for editing.
  def self.prepare_for_edit(assembly,component_module,opts={})
    new(assembly).prepare_for_edit(component_module,opts)
  end
  def prepare_for_edit(component_module,opts={})
    get_applicable_component_instances(component_module)
    create_assembly_branch?(component_module,opts)
  end

  # Returns workspace branch info for component_module augmented with the
  # assembly-local branch name/sha, creating the local branch when missing.
  def self.component_module_workspace_info(assembly, component_module, opts={})
    new(assembly).component_module_workspace_info(component_module, opts)
  end
  def component_module_workspace_info(component_module, opts={})
    # NOTE(review): the :raise_error_if_empty option is ignored by
    # get_applicable_component_instances (its opts param is unused) — confirm
    get_applicable_component_instances(component_module,:raise_error_if_empty => true)
    am_version = assembly_module_version()

    base_branch = component_module.get_workspace_branch_info()
    raise ErrorNoChangesToModule.new(@assembly, component_module) unless base_branch

    unless local_branch = component_module.get_workspace_module_branch(am_version)
      create_assembly_branch?(component_module,opts)
      local_branch = component_module.get_workspace_module_branch(am_version)
    end

    base_branch.merge!(:version => am_version, :local_branch => local_branch[:display_name], :current_branch_sha => local_branch[:current_sha])
    base_branch
  end

  # Applies the edits on module_branch to the applicable component instances;
  # on failure rolls the branch back to opts[:current_branch_sha] if given.
  def self.finalize_edit(assembly,component_module,module_branch,opts={})
    new(assembly).finalize_edit(component_module,module_branch,opts)
  end
  def finalize_edit(component_module,module_branch,opts={})
    cmp_instances = get_applicable_component_instances(component_module)
    project_idh = component_module.get_project().id_handle()
    begin
      Clone::IncrementalUpdate::Component.new(project_idh,module_branch).update?(cmp_instances,opts)
    # NOTE(review): rescuing Exception also catches SignalException/SystemExit;
    # rescuing StandardError is usually intended — confirm
    rescue Exception => e
      if sha = opts[:current_branch_sha]
        # restore the branch to its pre-edit state before re-raising
        repo = module_branch.get_repo()
        repo.hard_reset_branch_to_sha(module_branch, sha)
        module_branch.set_sha(sha)
      end

      raise e
    end
  end

  # Deletes every assembly-version module branch (and its module version);
  # branches with no :component_id are deleted directly as a workaround.
  def delete_modules?()
    am_version = assembly_module_version()
    # do not want to use assembly.get_component_modules() to generate component_modules because there can be modules that do not correspond to component instances
    sp_hash = {
      :cols => [:id,:group_id,:display_name,:component_id],
      :filter => [:eq,:version,am_version]
    }
    component_module_mh = @assembly.model_handle(:component_module)
    Model.get_objs(@assembly.model_handle(:module_branch),sp_hash).each do |r|
      unless r[:component_id]
        # Log.error("Unexpected that #{r.inspect} has :component_id nil; workaround is to delete this module branch")
        Model.delete_instance(r.id_handle())
        next
      end
      component_module = component_module_mh.createIDH(:id => r[:component_id]).create_object()
      component_module.delete_version?(am_version)
    end
  end

  # NOTE(review): AdHocLink#create_dependency? takes (type, cmp, antec,
  # module_branch, opts={}) — this call passes only four arguments, so opts
  # lands in the module_branch slot; confirm whether this path is live
  def self.create_component_dependency?(type,assembly,cmp_template,antecedent_cmp_template,opts={})
    AdHocLink.new(assembly).create_dependency?(type,cmp_template,antecedent_cmp_template,opts)
  end

  # Merges assembly-branch changes back into the ancestor (base) branch.
  def self.promote_module_updates(assembly,component_module,opts={})
    new(assembly).promote_module_updates(component_module,opts)
  end
  def promote_module_updates(component_module,opts={})
    am_version = assembly_module_version()
    unless branch = component_module.get_workspace_module_branch(am_version)
      # distinguish "no changes" from "module not used by this assembly"
      component_module_id = component_module.id()
      if @assembly.get_component_modules().find{|r|r[:id] == component_module_id}
        raise ErrorNoChangesToModule.new(@assembly,component_module)
      else
        raise ErrorNoComponentsInModule.new(@assembly,component_module)
      end
    end
    unless ancestor_branch = branch.get_ancestor_branch?()
      raise Error.new("Cannot find ancestor branch")
    end
    branch_name = branch[:branch]
    ancestor_branch.merge_changes_and_update_model?(component_module,branch_name,opts)
  end

  def self.get_for_assembly(assembly,opts={})
    GetForAssembly.new(assembly).get_for_assembly(opts)
  end

  # opts can have
  #  :ret_locked_branch_sha - in which case this will be set to locked sha if it exists
  # Returns the namespace for module_name, consulting the assembly's locked
  # module refs when the name itself does not carry a namespace.
  def self.validate_component_module_ret_namespace(assembly,module_name,opts={})
    # NOTE(review): `name` is never used after this destructuring
    namespace, name = Namespace.full_module_name_parts?(module_name)
    types = [:locked_dependencies]
    if opts[:ret_locked_branch_sha]
      types << :locked_branch_shas
    else
      return namespace if namespace
    end
    # TODO: DTK-2014; use modification of ModuleRefs::Lock that passes in module name that looking for
    module_refs_lock = ModuleRefs::Lock.get(assembly,:types => types)
    unless namespace ||= module_refs_lock.matching_namespace?(module_name)
      raise(ErrorUsage.new("No object of type component module with name (#{module_name}) exists"))
    end
    if opts[:ret_locked_branch_sha]
      # opts is used as an out-parameter here
      opts[:ret_locked_branch_sha] = module_refs_lock.matching_locked_branch_sha?(module_name)
    end
    namespace
  end

  # Returns an array of git-diff-formatted strings between module_branch, its
  # base branch and workspace_branch.
  # NOTE(review): model_handle, remote_repo_cols and project_idh are unused
  def self.list_remote_diffs(model_handle, module_id, repo, module_branch, workspace_branch, opts)
    diffs, diff = [], nil
    remote_repo_cols = [:id, :display_name, :version, :remote_repos, :dsl_parsed]
    project_idh = opts[:project_idh]

    sp_hash = {
      :cols => [:id, :group_id, :display_name, :component_type],
      :filter => [:and,
                  [:eq, :type, 'component_module'],
                  [:eq, :version, ModuleBranch.version_field_default()],
                  [:eq, :repo_id, repo.id()],
                  [:eq, :component_id, module_id]
                 ]
    }
    base_branch = Model.get_obj(module_branch.model_handle(), sp_hash)
    diff = repo.get_local_branches_diffs(module_branch, base_branch, workspace_branch)

    diff.each do |diff_obj|
      # prepend the conventional git diff header line
      path = "diff --git a/#{diff_obj.a_path} b/#{diff_obj.b_path}\n"
      diffs << (path + "#{diff_obj.diff}\n")
    end

    diffs
  end

  private
  # Rewrites :display_name on each module to its full (namespace-qualified) name.
  def get_for_assembly__augment_name_with_namespace!(cmp_modules)
    return if cmp_modules.empty?
    ndx_cmp_modules = cmp_modules.inject(Hash.new){|h,m|h.merge(m[:id] => m)}
    ComponentModule.ndx_full_module_names(cmp_modules.map{|m|m.id_handle()}).each_pair do |ndx,full_module_name|
      ndx_cmp_modules[ndx][:display_name] = full_module_name
    end
  end

  # Creates the assembly-version branch if missing; returns workspace branch
  # info, or the ModuleBranch object itself when opts[:ret_module_branch].
  def create_assembly_branch?(component_module,opts={})
    am_version = assembly_module_version()
    unless component_module.get_workspace_module_branch(am_version)
      create_assembly_branch(component_module,am_version,opts)
    end
    ret = component_module.get_workspace_branch_info(am_version)
    if opts[:ret_module_branch]
      ret[:module_branch_idh].create_object()
    else
      ret
    end
  end

  def create_assembly_branch(component_module,am_version,opts={})
    base_version = nil
    component_module.create_new_version(base_version,am_version,opts)
  end

  # Finds on module_branch the template counterpart of cmp_template; raises if absent.
  def get_branch_template(module_branch,cmp_template)
    sp_hash = {
      :cols => [:id,:group_id,:display_name,:component_type],
      :filter => [:and,[:eq,:module_branch_id,module_branch.id()],
                  [:eq,:type,'template'],
                  [:eq,:node_node_id,nil],
                  [:eq,:component_type,cmp_template.get_field?(:component_type)]]
    }
    Model.get_obj(cmp_template.model_handle(),sp_hash) || raise(Error.new("Unexpected that branch_cmp_template is nil"))
  end

  # Component instances of component_module that belong to this assembly.
  # NOTE(review): opts is accepted but unused (callers pass
  # :raise_error_if_empty) — confirm intended behavior
  def get_applicable_component_instances(component_module,opts={})
    assembly_id = @assembly.id()
    component_module.get_associated_component_instances().select do |cmp|
      cmp[:assembly_id] == assembly_id
    end
  end

  # Base error carrying the assembly and module names; subclasses supply error_msg.
  class ErrorComponentModule < ErrorUsage
    def initialize(assembly,component_module)
      @assembly_name = assembly.display_name_print_form()
      @module_name = component_module.get_field?(:display_name)
      super(error_msg())
    end
  end
  class ErrorNoChangesToModule < ErrorComponentModule
    private
    def error_msg()
      "Changes to component module (#{@module_name}) have not been made in assembly (#{@assembly_name})"
    end
  end
  class ErrorNoComponentsInModule < ErrorComponentModule
    private
    def error_msg()
      "Assembly (#{@assembly_name}) does not have any components belonging to module (#{@module_name})"
    end
  end
end
-
end; end
-
-
2
module DTK; class AssemblyModule
-
1
class Component
-
1
# Creates an ad hoc (user-requested) link dependency between two component
# templates on the assembly-specific module branch.
class AdHocLink < self
  def self.update(assembly, parsed_adhoc_link_info)
    new(assembly).update(parsed_adhoc_link_info)
  end

  def update(parsed_adhoc_link_info)
    links = parsed_adhoc_link_info.links
    unless links.size == 1
      raise Error.new("Only implemented #{self}.update when parsed_adhoc_links.size == 1")
    end
    link = links.first

    dep_template   = parsed_adhoc_link_info.dep_component_template
    antec_template = parsed_adhoc_link_info.antec_component_template

    component_module = dep_template.get_component_module()
    module_branch = create_assembly_branch?(component_module, :ret_module_branch => true)

    create_opts = {
      :source_attr_pattern => link.attribute_pattern(:source),
      :target_attr_pattern => link.attribute_pattern(:target),
      :update_dsl => true
    }
    result = create_dependency?(:link, dep_template, antec_template, module_branch, create_opts)
    # only touch component instances when the module content actually changed
    update_cmp_instances_with_modified_template(component_module, module_branch) if result[:component_module_updated]
    result
  end

  # Creates the dependency on the branch's copy of cmp_template inside a
  # model transaction; returns the creation result hash.
  # NOTE(review): Component.create_component_dependency? invokes this with
  # only four arguments (no module_branch) — confirm that call site.
  def create_dependency?(type, cmp_template, antecedent_cmp_template, module_branch, opts = {})
    branch_cmp_template = get_branch_template(module_branch, cmp_template)

    # replace the boolean flag with the branch info the dependency class needs
    opts[:update_dsl] = { :module_branch => module_branch } if opts[:update_dsl]
    result = Hash.new
    Model.Transaction do
      result = dependency_class(type).create_dependency?(branch_cmp_template, antecedent_cmp_template, opts)
    end
    result
  end

  private

  def dependency_class(type)
    if type == :simple
      DTK::Dependency::Simple
    elsif type == :link
      DTK::Dependency::Link
    else
      raise Error.new("Illegal type")
    end
  end
end
-
end
-
end;end
-
2
module DTK; class AssemblyModule
-
1
class Component
-
1
# Creates component-level attributes on the assembly-specific branch of the
# component template that the attribute patterns resolve to.
class Attribute < self
  def self.update(assembly, cmp_level_attr_patterns)
    new(assembly).update(cmp_level_attr_patterns)
  end

  def update(cmp_level_attr_patterns)
    # TODO: more efficient to bulk up cmp_level_attr_patterns
    cmp_level_attr_patterns.map { |pattern| update_aux(pattern) }
  end

  private

  def update_aux(cmp_level_attr_pattern)
    # group the pattern's component instances by their parent template
    templates_ndx = Hash.new
    cmp_level_attr_pattern.component_instances().each do |cmp_instance|
      template = cmp_instance.get_component_template_parent()
      entry = (templates_ndx[template[:id]] ||= { :component_template => template, :component_instances => Array.new })
      entry[:component_instances] << cmp_instance
    end
    unless templates_ndx.size == 1
      raise Error.new("Not implemented yet when atttribute pattern is associated with more than one component template")
    end
    cmp_template = templates_ndx.values.first[:component_template]

    component_module = cmp_template.get_component_module()
    module_branch = create_assembly_branch?(component_module, :ret_module_branch => true)
    branch_cmp_template = get_branch_template(module_branch, cmp_template)
    cmp_level_attr_pattern.create_attribute_on_template(branch_cmp_template, :update_dsl => { :module_branch => module_branch })
  end
end
-
end
-
end;end
-
-
2
module DTK; class AssemblyModule
-
1
class Component
-
1
# Retrieves the component modules associated with the assembly, optionally
# annotated with version/branch info (:get_version_info) and optionally as the
# recursive closure over locked module refs (:recursive).
class GetForAssembly < self
  def get_for_assembly(opts={})
    add_module_branches = opts[:get_version_info]
    ret = (opts[:recursive] ? get_with_branches_recursive(opts) : get_with_branches(opts))

    add_version_info!(ret) if add_module_branches

    # remove branches; they are no longer needed
    ret.each{|r|r.delete(:module_branch)}
    ret
  end

  private
  # Finds, not just directly referenced component modules, but the recursive closure taking into account all locked component module refs
  def get_with_branches_recursive(opts={})
    ret = Array.new
    locked_module_refs = ModuleRefs::Lock.get(@assembly,:with_module_branches => true,:types => [:locked_dependencies])
    # get component modules by finding the component module id in locked_module_refs elements
    els_ndx_by_cmp_mod_ids = Hash.new
    locked_module_refs.elements.each do |el|
      if component_id = (el.module_branch||{})[:component_id]
        els_ndx_by_cmp_mod_ids[component_id] = el
      end
    end
    return ret if els_ndx_by_cmp_mod_ids.empty?

    sp_hash = {
      :cols => [:id,:display_name,:group_id,:namespace_id],
      :filter => [:oneof,:id, els_ndx_by_cmp_mod_ids.keys]
    }
    ret = Model.get_objs(@assembly.model_handle(:component_module),sp_hash)
    # annotate each found module with namespace/branch info from its locked ref
    ret.each do |r|
      if el = els_ndx_by_cmp_mod_ids[r[:id]]
        to_add = {
          :namespace_name => el.namespace,
          :dsl_parsed => (el.module_branch||{})[:dsl_parsed],
          :module_branch => el.module_branch
        }
        r.merge!(to_add)
      end
    end
    ret
  end
  # TODO: make sure that where these two overlap they are consistent in namespace assignments
  def get_with_branches(opts={})
    ndx_ret = Hash.new
    add_module_branches = opts[:get_version_info]
    # there is a row for each component; assumption is that all rows belonging to same component will have same branch
    @assembly.get_objs(:cols=> [:instance_component_module_branches]).each do |r|
      component_module = r[:component_module]
      component_module.merge!({:namespace_name => r[:namespace][:display_name]}) if r[:namespace]
      component_module.merge!({:dsl_parsed => r[:module_branch][:dsl_parsed]}) if r[:module_branch]
      # first row per module wins; ||= dedups on module id
      ndx_ret[component_module[:id]] ||= component_module.merge(add_module_branches ? r.hash_subset(:module_branch) : {})
    end
    ndx_ret.values
  end

  # TODO: if this is derived from ModuleRefs::Lock can do this more efficiently by having ModuleRefs::Lock have base branch
  # Marks modules with a local (assembly-version) copy and, by comparing shas
  # against the workspace branch, whether that copy diverges (:local_copy_diff,
  # :branch_relationship). Mutates and returns modules_with_branches.
  def add_version_info!(modules_with_branches)
    local_copy_els = Array.new
    modules_with_branches.each do |r|
      if r[:module_branch].assembly_module_version?()
        r[:local_copy] = true
        local_copy_els << r
      end
    end

    # for each item with local_copy, check for diff_from_base
    if local_copy_els.empty?
      return modules_with_branches
    end
    # TODO: check if we are missing anything; maybe when there is just a meta change we dont update what component pointing to
    # but create a new branch, which we can check with ComponentModule.get_workspace_module_branches with idhs from all els in modules_with_branches
    # this is related to DTK-1214

    # get the associated master branch and see if there is any diff
    mod_idhs = local_copy_els.map{|r|r.id_handle()}
    ndx_workspace_branches = ComponentModule.get_workspace_module_branches(mod_idhs).inject(Hash.new) do |h,r|
      h.merge(r[:module_id] => r)
    end

    local_copy_els.each do |r|
      unless workspace_branch = ndx_workspace_branches[r[:id]]
        Log.error("Unexpected that ndx_workspace_branches[r[:id]] is null")
        next
      end
      assembly_mod_branch = r[:module_branch]
      unless assembly_mod_sha = assembly_mod_branch[:current_sha]
        # This can happen if user goes into edit mode, but makes no changes to a component module
        # r.delete(:local_copy) is so it does not appear as edited
        r.delete(:local_copy)
        next
      end
      unless workspace_mod_sha = workspace_branch[:current_sha]
        Log.error("Unexpected that workspace_mod_sha is nil")
      end
      r[:local_copy_diff] = (assembly_mod_sha != workspace_mod_sha)
      # TODO: code to put in when
      # want to check case when :local_behind and :branchpoint
      # In order to do this must iterate all branches, not just changed ones and
      # need to do a refresh on workspace branch sha in case this was updated in another branch
      if r[:local_copy_diff]
        # shas differ by value; classify how the branches actually relate
        sha_relationship = RepoManager.ret_sha_relationship(assembly_mod_sha, workspace_mod_sha, assembly_mod_branch)
        case sha_relationship
        when :local_behind, :local_ahead, :branchpoint
          r[:branch_relationship] = sha_relationship
        when :equal
          r[:local_copy_diff] = false
        end
      end
    end

    modules_with_branches
  end

end
-
end
-
end;end
-
-
2
module DTK; class AssemblyModule
-
1
# Assembly-scoped operations over the service module tied to an assembly:
# locating/creating the assembly-version branch and editing workflows through
# modification-type objects (currently only Workflow).
class Service < self
  r8_nested_require('service', 'workflow')

  def initialize(assembly, opts = {})
    super(assembly)
    @assembly_template_name = assembly_template_name?(assembly)
    @service_module = opts[:service_module] || get_service_module(assembly)
    @am_version = assembly_module_version(assembly)
  end
  private :initialize

  # This checks if an assembly-specific branch has been made and returns it,
  # otherwise gives the base branch.
  def self.get_assembly_branch(assembly)
    new(assembly).get_assembly_branch()
  end

  def get_assembly_branch()
    branches = @service_module.get_module_branches()
    assembly_branch = branches.find { |mb| mb.matches_version?(@am_version) }
    assembly_branch || branches.find { |mb| mb.matches_base_version? }
  end

  def self.get_or_create_assembly_branch(assembly)
    new(assembly).get_or_create_assembly_branch()
  end

  def get_or_create_assembly_branch()
    @service_module.get_module_branch_matching_version(@am_version) || create_assembly_branch()
  end

  # returns a ModuleRepoInfo object
  def self.prepare_for_edit(assembly, modification_type, opts = {})
    modification_type_obj = create_modification_type_object(assembly, modification_type, opts)
    # trapping any error when using prepare for edit
    modification_type_obj.create_and_update_assembly_branch?(:trap_error => true)
  end

  def self.finalize_edit(assembly, modification_type, service_module, module_branch, diffs_summary, opts = {})
    merged_opts = { :service_module => service_module }.merge(opts)
    modification_type_obj = create_modification_type_object(assembly, modification_type, merged_opts)
    modification_type_obj.finalize_edit(module_branch, diffs_summary)
  end

  # Deletes the assembly-version of the service module; no-op (returns nil)
  # when the assembly has no service module and opts[:do_not_raise] is set.
  def delete_module?(opts = {})
    service_module = get_service_module(@assembly, opts)
    return if service_module == false
    service_module.delete_version?(assembly_module_version(), :donot_delete_meta => true)
  end

  private

  # returns new module branch
  def create_assembly_branch()
    base_version = @service_module.get_field?(:version) # TODO: is this right; shouldnt version be on branch, not module
    @service_module.create_new_version(base_version, @am_version)
  end

  # Name of the assembly template this assembly was instantiated from, or nil.
  def assembly_template_name?(assembly)
    assembly_template = assembly.get_parent()
    if assembly_template
      assembly_template.get_field?(:display_name)
    else
      assembly_name = assembly.display_name_print_form()
      Log.info("Assembly (#{assembly_name}) is not tied to an assembly template")
      nil
    end
  end

  def self.create_modification_type_object(assembly, modification_type, opts = {})
    modification_type_class(modification_type).new(assembly, opts)
  end

  def self.modification_type_class(modification_type)
    case modification_type
    when :workflow then Workflow
    else raise ErrorUsage.new("Modification type (#{modification_type}) is not supported")
    end
  end

  # Returns the assembly's service module; false when missing and
  # opts[:do_not_raise] is set, otherwise raises ErrorUsage.
  def get_service_module(assembly, opts = {})
    ret = assembly.get_service_module()
    unless ret
      assembly_name = assembly.display_name_print_form()
      return false if opts[:do_not_raise]
      raise ErrorUsage.new("Assembly (#{assembly_name}) is not tied to a service")
    end
    ret
  end
end
-
end; end
-
2
# Workflow-specific assembly-module service logic: keeps the assembly
# branch's serialized workflow meta file in sync with the Task::Template
# model, in both directions.
module DTK; class AssemblyModule
  class Service
    class Workflow < self
      def initialize(assembly,opts={})
        super(assembly,opts)
        # opts[:task_action] can be nil; meta_file_path falls back to DefaultTaskAction
        @task_action = opts[:task_action]
      end

      # Ensures the assembly branch exists, writes the current serialized
      # workflow into it, and returns a ModuleRepoInfo object (the branch's
      # workspace info) augmented with :edit_file = path of the workflow meta file.
      def create_and_update_assembly_branch?(opts={})
        module_branch = get_or_create_assembly_branch()

        if opts[:trap_error]
          # TODO: removed trapped error so send error to client when the workflow name does not exist
          # see if can remove completely
          Log.info(':trap_error being ignored')
          # begin
          #  update_assembly_branch(module_branch)
          # rescue => e
          #  Log.info_pp(["trapped error in create_and_update_assembly_branch",e])
          # end
        end

        update_assembly_branch(module_branch)

        @service_module.get_workspace_branch_info(@am_version).merge(:edit_file => meta_file_path())
      end

      # If the workflow meta file changed, re-reads it from the repo, converts
      # it to a hash, and updates the task-template model.
      # Returns the hash_content object early when conversion itself produced a
      # parsing error; otherwise raises accumulated parse errors — but only
      # AFTER the model update has been attempted (intentional ordering).
      def finalize_edit(module_branch,diffs_summary)
        parse_errors = nil
        file_path = meta_file_path()
        if diffs_summary.file_changed?(file_path)
          file_content = RepoManager.get_file_content(file_path,module_branch)
          format_type = Aux.format_type(file_path)
          hash_content = Aux.convert_to_hash(file_content,format_type)
          return hash_content if ServiceModule::ParsingError.is_error?(hash_content)
          parse_errors = Task::Template::ConfigComponents.find_parse_errors(hash_content,@assembly)
          # NOTE(review): the model is updated even when parse_errors is non-nil — confirm intended
          Task::Template.create_or_update_from_serialized_content?(@assembly.id_handle(),hash_content,@task_action)
        end
        raise parse_errors if parse_errors
      end

      private

      # Serializes the assembly's current task-template content and splices it
      # into the workflow meta file on the given branch.
      def update_assembly_branch(module_branch)
        opts = {:serialized_form => true}
        opts.merge!(:task_action => @task_action) if @task_action
        template_content = Task::Template::ConfigComponents.get_or_generate_template_content(:assembly,@assembly,opts)
        splice_in_workflow(module_branch,template_content)
      end

      # Saves the serialized template content to the repo at meta_file_path.
      def splice_in_workflow(module_branch,template_content)
        hash_content = template_content.serialization_form()
        module_branch.serialize_and_save_to_repo?(meta_file_path(),hash_content)
      end

      # Repo path of the workflow meta file for this assembly template and
      # task action (defaults to DefaultTaskAction when no task action given).
      def meta_file_path()
        ServiceModule.assembly_workflow_meta_filename_path(@assembly_template_name,@task_action||DefaultTaskAction)
      end
      # TODO: unify this with code on task/template
      DefaultTaskAction = 'create'

    end
  end
end; end
-
1
module DTK
  # Base class for DTK modules (component/node/service modules); bundles the
  # update/import mixins and queries for objects associated with a module.
  class BaseModule < Model
    r8_nested_require('base_module','update_module')
    r8_nested_require('base_module','version_context_info')

    # TODO: look through r8_nested_require('module'..,) and see which ones should instead be under base_module
    r8_nested_require('module','dsl')
    r8_nested_require('module','node_module_dsl')
    r8_nested_require('module','auto_import')

    r8_nested_require('module','delete_mixin')

    include DeleteMixin
    extend ModuleClassMixin
    extend AutoImport
    include ModuleMixin
    include UpdateModule::Mixin
    extend UpdateModule::ClassMixin

    # Assembly templates associated with this module, de-duplicated by id and
    # augmented with namespace info.
    def get_associated_assembly_templates()
      ndx_ret = Hash.new
      get_objs(:cols => [:assembly_templates]).each do |r|
        assembly_template = r[:assembly_template]
        ndx_ret[assembly_template[:id]] ||= Assembly::Template.create_as(assembly_template)
      end
      Assembly::Template.augment_with_namespaces!(ndx_ret.values)
    end

    # each of the module's component_templates associated with zero or more assembly template component references
    # component refs indexed by component template; plus augmented info for cmp refs; it has form
    # Component::Template:
    #   component_refs:
    #     - ComponentRef:
    #       node: Node
    #       assembly_template: Assembly::Template
    def get_associated_assembly_cmp_refs()
      ndx_ret = Hash.new
      get_objs(:cols => [:assembly_templates]).each do |r|
        component_template = r[:component_template]
        pntr = ndx_ret[component_template[:id]] ||= component_template.merge(:component_refs => Array.new)
        pntr[:component_refs] << r[:component_ref].merge(r.hash_subset(:id,:display_name,:node,:assembly_template))
      end
      ndx_ret.values
    end

    # Unique component instances for this module, each wrapped as a
    # Component::Instance subclass object; :namespace is copied in when present.
    def get_associated_component_instances()
      ndx_ret = Hash.new
      get_objs(:cols => [:component_instances]).each do |r|
        cmp = r[:component]
        cmp[:namespace] = r[:namespace][:display_name] if r[:namespace]
        ndx_ret[cmp[:id]] ||= Component::Instance.create_subclass_object(cmp)
      end
      ndx_ret.values
    end

    # List-style summaries about this module.
    # about  - :components, :attributes, or :instances (anything `to_sym`-able)
    # cmp_id - optional component-id filter, only honored for :attributes
    #          (compared with to_i, so it is expected to be a string)
    # Raises Error for any other value of about.
    def info_about(about, cmp_id=nil)
      case about.to_sym
      when :components
        # skip components on assembly-module branches; sort by "version-name"
        get_objs(:cols => [:components]).map do |r|
          cmp = r[:component]
          branch = r[:module_branch]
          unless branch.assembly_module_version?()
            display_name = Component::Template.component_type_print_form(cmp[:component_type],Opts.new(:no_module_name => true))
            {:id => cmp[:id], :display_name => display_name,:version => branch.version_print_form() }
          end
        end.compact.sort{|a,b|"#{a[:version]}-#{a[:display_name]}" <=>"#{b[:version]}-#{b[:display_name]}"}
      when :attributes
        results = get_objs(:cols => [:attributes])
        results.delete_if { |e| !(e[:component][:id] == cmp_id.to_i) } if cmp_id && !cmp_id.empty?

        # remove assembly branch attributes
        results.delete_if { |e| e[:module_branch].assembly_module_version?() }

        ret = results.inject([]) do |transformed, element|
          attribute = element[:attribute]
          branch = element[:module_branch]
          transformed << { :id => attribute[:id], :display_name => attribute.print_path(element[:component]), :value => attribute[:value_asserted], :version=> branch.version_print_form()}
        end
        return ret.sort{|a,b|a[:display_name] <=> b[:display_name]}
      when :instances
        results = get_objs(:cols => [:component_module_instances_assemblies])
        # another query to get component instances that do not have assembly
        results += get_objs(:cols => [:component_module_instances_node])

        results.map do |el|
          component_instance = el[:component_instance]
          display_name_parts = {
            :node => el[:node][:display_name],
            :component => Component::Instance.print_form(component_instance),
          }
          display_name = "#{display_name_parts[:node]}/#{display_name_parts[:component]}"
          if assembly = el[:assembly]
            # prefix the assembly (service instance) name when one exists
            assembly_name = assembly[:display_name]
            display_name_parts.merge!(:assembly => assembly_name)
            display_name = "#{assembly_name}/#{display_name}"
          end
          {
            :id => component_instance[:id],
            :display_name => display_name,
            :display_name_parts => display_name_parts,
            :service_instance => display_name_parts[:assembly],
            :node => display_name_parts[:node],
            :component_instance => display_name_parts[:component],
            :version => ModuleBranch.version_from_version_field(component_instance[:version])
          }
        end
      else
        raise Error.new("TODO: not implemented yet: processing of info_about(#{about})")
      end
    end

    # Identity mapping by default; subclasses may specialize.
    def self.module_specific_type(config_agent_type)
      config_agent_type
    end

    # NOTE(review): refreshes :module_branches but reads self[:module_branch]
    # (singular) — confirm the key mismatch is intentional
    def module_branches()
      self.update_object!(:module_branches)
      self[:module_branch]
    end

    # raises exception if more repos found
    def get_repo!()
      repos = get_repos()
      unless repos.size == 1
        raise Error.new("unexpected that number of matching repos is not equal to 1")
      end

      return repos.first()
    end

    def get_repos()
      get_objs_helper(:repos,:repo)
    end

    def get_associated_target_instances()
      get_objs_uniq(:target_instances)
    end

    # Default config-agent type symbol (system-wide default).
    def config_agent_type_default()
      ConfigAgent::Type.default_symbol()
    end

    private

    # Guard used before publishing: raises a user-facing error when the
    # branch's DSL has not parsed cleanly.
    def publish_preprocess_raise_error?(module_branch_obj)
      # unless get_field?(:dsl_parsed)
      unless module_branch_obj.dsl_parsed?()
        raise ErrorUsage.new("Unable to publish module that has parsing errors. Please fix errors and try to publish again.")
      end
    end

  end
end
-
# TODO: possible cleanup: some methods raise parsing errors while others pass errors back.
# If we don't want to find multiple errors in a single pass, we can simplify by having all of them raise errors and then remove all
# the statements that check whether a response is a parsing error (which usually return immediately, so multiple errors are not detected anyway).
-
2
module DTK; class BaseModule
  # Orchestrates parsing a module's DSL files and updating the model, for
  # imports (git/file/puppet forge), clone-change pushes, and version creation.
  class UpdateModule
    r8_nested_require('update_module','puppet_forge')
    r8_nested_require('update_module','import')
    r8_nested_require('update_module','clone_changes')
    r8_nested_require('update_module','update_module_refs')
    # BUGFIX: 'external_refs' was required twice; removed the duplicate line
    r8_nested_require('update_module','external_refs')
    r8_nested_require('update_module','create')
    r8_nested_require('update_module','scaffold_implementation')
    include CreateMixin

    # base_module - the BaseModule (or subclass) instance being updated
    def initialize(base_module)
      @base_module = base_module
      @module_class = base_module.class
    end

    ####### mixin public methods #########
    # Mixed in at the class level of module classes.
    module ClassMixin
      def import_from_puppet_forge(project,puppet_forge_local_copy,opts={})
        PuppetForge.new(project,puppet_forge_local_copy,opts).import_module_and_missing_dependencies()
      end
    end

    # Mixed into module instances; thin delegators to the Import /
    # CloneChanges / UpdateModule helper objects.
    module Mixin
      def import_from_git(commit_sha,repo_idh,version,opts={})
        Import.new(self,version).import_from_git(commit_sha,repo_idh,opts)
      end

      def import_from_file(commit_sha,repo_idh,version,opts={})
        Import.new(self,version).import_from_file(commit_sha,repo_idh,opts)
      end

      def update_model_from_clone_changes(commit_sha,diffs_summary,module_branch,version,opts={})
        CloneChanges.new(self).update_from_clone_changes(commit_sha,diffs_summary,module_branch,version,opts)
      end

      def parse_dsl_and_update_model(impl_obj,module_branch_idh,version,opts={})
        UpdateModule.new(self).parse_dsl_and_update_model(impl_obj,module_branch_idh,version,opts)
      end

      # called when installing from dtkn catalog
      # returns nil or parsing error
      def install__process_dsl(repo,module_branch,local,opts={})
        # Skipping module_ref_update since the module being installed has this set already, so just copy this in
        opts = {:update_module_refs_from_file => true}.merge(opts)
        UpdateModule.new(self).install__process_dsl(repo,module_branch,local,opts)
      end

      def pull_from_remote__update_from_dsl(repo, module_and_branch_info,version=nil)
        UpdateModule.new(self).pull_from_remote__update_from_dsl(repo, module_and_branch_info,version)
      end

      # returns the new module branch
      # This is called when creating a service-instance-specific component module
      def create_new_version__type_specific(repo_for_new_branch,new_version,opts={})
        local = UpdateModule.ret_local(self,new_version)
        # TODO: this is expensive in that it creates the new version by parsing the dsl and reading it back in;
        # would be much less expensive to clone from branch to branch
        opts_update = {:update_module_refs_from_file => true}.merge(opts)
        response = UpdateModule.new(self).create_needed_objects_and_dsl?(repo_for_new_branch,local,opts_update)
        response[:module_branch_idh].create_object()
      end
    end
    ####### end: mixin public methods #########

    # TODO: for testing
    def test_generate_dsl()
      module_branch = get_module_branch_matching_version()
      config_agent_type = :puppet
      impl_obj = module_branch.get_implementation()
      ScaffoldImplementation.create_dsl(module_name(),config_agent_type,impl_obj)
    end
    ### end: for testing

    # Parses the DSL and marks the branch parsed on success.
    # Returns nil on success, or the parsing-error object on failure.
    def install__process_dsl(repo,module_branch,local,opts={})
      response = create_needed_objects_and_dsl?(repo,local,opts)
      if is_parsing_error?(response)
        response
      else
        module_branch.set_dsl_parsed!(true)
        nil
      end
    end

    def pull_from_remote__update_from_dsl(repo, module_and_branch_info,version=nil)
      info = module_and_branch_info # for succinctness
      module_branch_idh = info[:module_branch_idh]
      # NOTE(review): module_branch is built but never used below — confirm whether
      # create_object has needed side effects or whether this line can be removed
      module_branch = module_branch_idh.create_object().merge(:repo => repo)
      create_needed_objects_and_dsl?(repo,ret_local(version))
    end

    # only returns non-nil on a parsing error; it traps parsing errors
    def parse_dsl_and_update_model_with_err_trap(impl_obj,module_branch_idh,version,opts={})
      klass()::ParsingError.trap(:only_return_error=>true){parse_dsl_and_update_model(impl_obj,module_branch_idh,version,opts)}
    end

    # Parses the module's DSL, updates the model, and maintains module refs.
    # Returns the parsing-error object on untrapped parse errors, a result hash
    # (possibly carrying :external_dependencies/:message) when errors exist,
    # or nil when the include-validation path completes without errors.
    def parse_dsl_and_update_model(impl_obj,module_branch_idh,version,opts={})
      ret = Hash.new
      module_branch = module_branch_idh.create_object()

      # mark unparsed up front; re-marked parsed only after a clean run
      module_branch.set_dsl_parsed!(false)
      config_agent_type = opts[:config_agent_type] || config_agent_type_default()
      # TODO: for efficiency can change parse_dsl to take option opts[:dsl_created_info]
      dsl_obj = parse_dsl(impl_obj,opts.merge(:config_agent_type => config_agent_type))
      return dsl_obj if is_parsing_error?(dsl_obj)

      dsl_obj.update_model_with_ref_integrity_check(:version => version)

      if opts[:update_from_includes]
        ret = UpdateModuleRefs.new(dsl_obj,@base_module).validate_includes_and_update_module_refs()
        return ret if is_parsing_error?(ret)
      end

      external_deps = ret[:external_dependencies]

      if opts[:update_module_refs_from_file]
        # updating module refs from the component_module_ref file
        ModuleRefs::Parse.update_component_module_refs(@module_class,module_branch)
      else
        opts_save_dsl = Opts.create?(:message? => ret[:message],:external_dependencies? => external_deps)
        if dsl_updated_info = UpdateModuleRefs.save_dsl?(module_branch,opts_save_dsl)
          # hand the updated-info back to the caller through the opts hash
          if opts[:ret_dsl_updated_info]
            opts[:ret_dsl_updated_info] = dsl_updated_info
          end
        end
      end

      # TODO: see if can simplify and make this an 'else' to opts[:update_from_includes] above
      unless opts[:update_from_includes]
        module_branch.set_dsl_parsed!(true) if !opts[:dsl_parsed_false]
        return ret
      end

      no_errors = external_deps.nil? || !external_deps.any_errors?()
      if no_errors and !opts[:dsl_parsed_false]
        module_branch.set_dsl_parsed!(true)
      end

      opts[:external_dependencies] = external_deps if external_deps
      # nil when there are no errors (callers treat nil as success)
      ret unless no_errors
    end

    # Builds a server-side "local params" object describing this module
    # (type, name, namespace, version) within its project.
    def self.ret_local(base_module,version)
      local_params = ModuleBranch::Location::LocalParams::Server.new(
        :module_type => base_module.module_type(),
        :module_name => base_module.module_name(),
        :namespace => base_module.module_namespace(),
        :version => version
      )
      local_params.create_local(base_module.get_project())
    end

    # Pushes a freshly-generated DSL file to the repo, then parses it to
    # create the corresponding model objects.
    def add_dsl_to_impl_and_create_objects(dsl_created_info,project,impl_obj,module_branch_idh,version,opts={})
      impl_obj.add_file_and_push_to_repo(dsl_created_info[:path],dsl_created_info[:content])
      opts.merge!(:project => project,:dsl_created_info => dsl_created_info)
      parse_dsl_and_update_model(impl_obj,module_branch_idh,version,opts)
    end

    private

    # DSL parser class matching the wrapped module's class.
    def klass()
      case @module_class
      when NodeModule
        NodeModuleDSL
      else
        ModuleDSL
      end
    end

    def ret_local(version)
      self.class.ret_local(@base_module,version)
    end

    def parse_dsl(impl_obj,opts={})
      klass().parse_dsl(@base_module,impl_obj,opts)
    end

    def update_component_module_refs(module_branch,matching_module_refs)
      UpdateModuleRefs.update_component_module_refs(module_branch,matching_module_refs,@base_module)
    end

    # The helpers below simply delegate to the wrapped base module.
    def set_dsl_parsed!(boolean)
      @base_module.set_dsl_parsed!(boolean)
    end

    def module_namespace()
      @base_module.module_namespace()
    end

    def module_name()
      @base_module.module_name()
    end

    def module_type()
      @base_module.module_type()
    end

    def config_agent_type_default()
      @base_module.config_agent_type_default()
    end

    def get_project()
      @base_module.get_project()
    end

    def is_parsing_error?(response)
      ModuleDSL::ParsingError.is_error?(response)
    end

  end
end; end
-
-
3
module DTK; class BaseModule; class UpdateModule
  # Applies a pushed clone's diffs to the model: updates file assets, then
  # re-parses the DSL depending on whether the branch is an assembly-module
  # version, an ordinary module with a DSL file, or a module needing scaffolding.
  class CloneChanges < self
    # Returns a ModuleDSLInfo carrying any dsl_parse_error, dsl_updated_info,
    # dsl_created_info, and external dependencies collected along the way.
    def update_from_clone_changes(commit_sha,diffs_summary,module_branch,version,opts={})
      ret = ModuleDSLInfo.new()
      opts.merge!(:ret_dsl_updated_info => Hash.new)
      dsl_created_info = ModuleDSLInfo::CreatedInfo.new()
      # NOTE(review): this local is never read below — confirm it can be removed
      module_namespace = module_namespace()
      impl_obj = module_branch.get_implementation()
      local = ret_local(version)
      project = local.project
      opts.merge!(:project => project)
      # TODO: make more robust to handle situation where diffs dont cover all changes; think can detect by looking at shas
      impl_obj.modify_file_assets(diffs_summary)

      if version.kind_of?(ModuleVersion::AssemblyModule)
        # assembly-module branch: parse only if the meta file changed, then
        # always run the component finalize step
        if meta_file_changed = diffs_summary.meta_file_changed?()
          if e = parse_dsl_and_update_model(impl_obj,module_branch.id_handle(),version,opts)
            ret.dsl_parse_error = e
          end
        end
        assembly = version.get_assembly(@base_module.model_handle(:component))
        opts_finalize = (meta_file_changed ? {:meta_file_changed => true} : {})
        opts_finalize.merge!(:service_instance_module => true) if opts[:service_instance_module]
        opts_finalize.merge!(:current_branch_sha => opts[:current_branch_sha]) if opts[:current_branch_sha]
        AssemblyModule::Component.finalize_edit(assembly,@base_module,module_branch,opts_finalize)
      elsif ModuleDSL.contains_dsl_file?(impl_obj)
        # ordinary module: re-parse when forced, when the meta file changed,
        # or when the branch was previously marked unparsed
        if opts[:force_parse] or diffs_summary.meta_file_changed?() or (module_branch.dsl_parsed?() == false)
          if e = parse_dsl_and_update_model_with_err_trap(impl_obj,module_branch.id_handle(),version,opts)
            ret.dsl_parse_error = e
          end
        end
      else
        # no DSL file at all: scaffold one
        config_agent_type = config_agent_type_default()
        dsl_created_info = ScaffoldImplementation.create_dsl(module_name(),config_agent_type,impl_obj)
      end

      dsl_updated_info = opts[:ret_dsl_updated_info]
      unless dsl_updated_info.empty?
        ret.dsl_updated_info = dsl_updated_info
      end

      ret.set_external_dependencies?(opts[:external_dependencies])
      ret.dsl_created_info = dsl_created_info
      ret
    end

    private

    # Traps parsing errors and returns the error object (or nil).
    def parse_dsl_and_update_model(impl_obj,module_branch_idh,version,opts={})
      # need to return dsl_parse_error to display error message when push module updates from service instance
      klass()::ParsingError.trap(:only_return_error=>true){@base_module.parse_dsl_and_update_model(impl_obj,module_branch_idh,version,opts)}
    end
  end
end; end; end
-
3
module DTK; class BaseModule; class UpdateModule
  # Creation path shared by UpdateModule: builds the implementation, module
  # and branch objects, processes provider-specific external refs, and either
  # parses an existing DSL file or scaffolds a new one.
  module CreateMixin
    # Returns a hash with at least :name/:namespace/:type/:version/:impl_obj/
    # :config_agent_type/:module_branch_idh/:dsl_created_info, plus optionally
    # :dsl_parse_error, :dsl_updated_info, :external_dependencies and
    # :matching_module_refs.
    def create_needed_objects_and_dsl?(repo, local, opts={})
      ret = Hash.new
      opts.merge!(:ret_dsl_updated_info => Hash.new)
      project = local.project
      version = local.version
      config_agent_type = opts[:config_agent_type] || config_agent_type_default()
      impl_obj = Implementation.create?(project,local,repo,config_agent_type)
      impl_obj.create_file_assets_from_dir_els()

      ret_hash = {
        :name => module_name(),
        :namespace => module_namespace(),
        :type => module_type(),
        :version => version,
        :impl_obj => impl_obj,
        :config_agent_type => config_agent_type
      }
      ret.merge!(ret_hash)

      module_and_branch_info = @module_class.create_module_and_branch_obj?(project,repo.id_handle(),local,opts[:ancestor_branch_idh])
      module_branch_idh = module_and_branch_info[:module_branch_idh]
      module_branch = module_branch_idh.create_object()

      # process any external refs if one of the flags :process_provider_specific_dependencies,:set_external_refs is true
      opts_external_refs = Aux.hash_subset(opts,[:process_provider_specific_dependencies,:set_external_refs])
      unless opts_external_refs.empty?
        # external_ref, if non-nil, will have info from the config-agent-related meta files such as a Puppet Modulefile
        if external_ref = ConfigAgent.parse_provider_specific_dependencies?(config_agent_type, impl_obj)
          module_branch.update_external_ref(external_ref[:content]) if external_ref[:content]
          if opts[:process_provider_specific_dependencies]
            # check_and_ret_external_ref_dependencies? returns a hash that can have keys: :external_dependencies and :matching_module_refs
            ret.merge!(ExternalRefs.new(@base_module).check_and_ret_external_ref_dependencies?(external_ref,project,module_branch))
          end
        end
      end

      dsl_created_info = ModuleDSLInfo::CreatedInfo.new()
      if klass().contains_dsl_file?(impl_obj)
        # DSL file exists: parse it (errors are trapped and surfaced in ret)
        if err = parse_dsl_and_update_model_with_err_trap(impl_obj,module_branch_idh,version,opts.merge!(:project => project))
          ret.merge!(:dsl_parse_error => err)
        end
      elsif opts[:scaffold_if_no_dsl]
        opts_scaffold = Opts.create?(:include_modules? => include_modules?(ret[:matching_module_refs],ret[:external_dependencies]))
        dsl_created_info = ScaffoldImplementation.create_dsl(module_name(),config_agent_type,impl_obj,opts_scaffold)
        if opts[:commit_dsl]
          # add dsl file and create DTK module objects from the dsl
          add_dsl_to_impl_and_create_objects(dsl_created_info,project,impl_obj,module_branch_idh,version,opts)
        else
          Log.error("Unexpected that opts[:commit_dsl] is false when opts[:scaffold_if_no_dsl] is true")
        end
      end

      # move top level folders/files in provider subfolder
      Import.move_content_to_provider_subdir(repo, impl_obj) if Import::IMPORT_FORM.eql?('puppet_folder_form')

      dsl_updated_info = opts[:ret_dsl_updated_info]
      if dsl_updated_info && !dsl_updated_info.empty?
        ret.merge!(:dsl_updated_info => dsl_updated_info)
      end

      ret.merge(:module_branch_idh => module_branch_idh, :dsl_created_info => dsl_created_info)
    end

    private

    # Collects module names to include in a scaffolded DSL from matched refs
    # plus possibly-missing and ambiguous external dependencies.
    # Returns nil when there is nothing to include (note: also nil — not [] —
    # when inputs were given but produced no names).
    def include_modules?(matching_module_refs,external_dependencies)
      ret = nil
      return ret unless matching_module_refs or external_dependencies
      ret = Array.new
      if matching_module_refs
        matching_module_refs.each{|r|ret << r.component_module}
      end
      if external_dependencies
        if missing = external_dependencies.possibly_missing?
          # assuming that each element is of form ns/module or module
          missing.each{|r|ret << r.split('/').last}
        end
        if ambiguous = external_dependencies.ambiguous?
          # ambiguous is a hash with keys of form ns/module or module
          # example is {"puppetlabs/stdlib"=>["puppetlabs", "r8"]}
          ambiguous.each_key{|r|ret << r.split('/').last}
        end
        # TODO: add inconsistent elements
      end
      ret.uniq unless ret.empty?
    end

  end
end; end; end
-
# TODO: Aldin: eventually clean up to use the new methods in module_refs/component_dsl_form and in update_module_refs
-
2
module DTK; class BaseModule
  class UpdateModule
    # Matches a module's provider-declared dependencies (e.g. from a Puppet
    # Modulefile) against the modules already in the project, classifying each
    # as matched, version-inconsistent, possibly missing, or namespace-ambiguous.
    class ExternalRefs < self
      # returns a hash that can have keys
      #  :external_dependencies
      #  :matching_module_refs
      def check_and_ret_external_ref_dependencies?(external_ref,project,module_branch)
        ret = Hash.new
        return ret unless dependencies = external_ref[:dependencies]

        parsed_dependencies = dependencies.map{|dep|dep.parsed_form?()}.compact
        return ret if parsed_dependencies.empty?

        # accumulators for the classification pass below
        all_match_hashes, all_inconsistent, all_possibly_missing, all_inconsistent_names = {}, [], [], []
        all_ambiguous, all_ambiguous_ns, temp_existing = [], [], {}
        all_modules = @module_class.get_all(project.id_handle()).map{|cmp_mod|ComponentModuleWrapper.new(cmp_mod)}
        existing_module_refs = get_existing_module_refs(module_branch)

        parsed_dependencies.each do |parsed_dependency|
          dep_name = parsed_dependency[:name].strip()
          version_constraints = parsed_dependency[:version_constraints]
          match, inconsistent, possibly_missing = nil, nil, nil

          # if there is no component_modules or just this one in database, mark all dependencies as possibly missing
          base_module_id = @base_module.id()
          all_modules_except_this = all_modules.reject{|cmp_mod_wrapper|cmp_mod_wrapper.id == base_module_id}
          all_possibly_missing << dep_name if all_modules_except_this.empty?

          all_modules_except_this.each do |cmp_mod_w|
            cmp_mod_w.module_branches().each do |branch_w|
              if branch_w.has_external_ref?()
                branch = branch_w.branch
                branch_name = branch_w.branch_name
                branch_version = branch_w.branch_version

                if (branch_name && branch_version)
                  # normalize to the first x.y.z substring of the version
                  matched_branch_version = branch_version.match(/(\d+\.\d+\.\d+)/)
                  branch_version = matched_branch_version[1]

                  evaluated, br_version, constraint_op, req_version, required_version = false, nil, nil, nil, nil
                  if dep_name.eql?(branch_name)
                    # version_constraints.nil? || empty? means no version constraint
                    if version_constraints.nil? || version_constraints.empty?
                      evaluated = true
                    else
                      # all constraints must hold (break on first failure)
                      version_constraints.each do |vconst|
                        required_version = vconst[:version]
                        # NOTE(review): stripping dots ("4.10.0" -> "4100") does not
                        # preserve semantic-version ordering — confirm acceptable here
                        br_version = branch_version.gsub('.','')
                        constraint_op = vconst[:constraint]
                        req_version = required_version.gsub('.','')

                        # if version constraints in form of 4.x
                        if req_version.to_s.include?('x')
                          req_version.gsub!(/x/,'')
                          evaluated = br_version.to_s.start_with?(req_version.to_s)
                        else
                          # SECURITY/TODO: avoid eval; constraint_op comes from module
                          # metadata — consider Gem::Requirement or Integer#public_send
                          evaluated = eval("#{br_version}#{constraint_op}#{req_version}")
                        end
                        break if evaluated == false
                      end
                    end

                    if evaluated
                      # second match for the same dep name => ambiguous namespaces
                      if all_match_hashes.has_key?(dep_name)
                        already_in_ambiguous = all_ambiguous.select{|amb| amb.values.include?(dep_name)}
                        if already_in_ambiguous.empty?
                          namespace_info = all_match_hashes[dep_name].get_namespace_info
                          all_ambiguous << {:name => dep_name, :namespace => namespace_info[:namespace][:display_name]}
                        end
                        namespace_info = branch.get_namespace_info
                        all_ambiguous << {:name => dep_name, :namespace => namespace_info[:namespace][:display_name]}
                      end

                      if existing_module_refs.empty? || existing_module_refs['component_modules'].nil?
                        all_match_hashes.merge!(dep_name => branch)
                      else
                        # prefer a match whose namespace agrees with the existing
                        # module_refs file; otherwise remember it as a candidate
                        name = dep_name.split('/').last
                        namespace_info = branch.get_namespace_info
                        existing_namespace = existing_module_refs['component_modules']["#{name}"]
                        if existing_namespace && existing_namespace['namespace'].eql?(namespace_info[:namespace][:display_name])
                          all_match_hashes.merge!(dep_name => branch)
                        else
                          if temp_existing.has_key?(dep_name)
                            temp_namespace_info = temp_existing[dep_name].get_namespace_info
                            all_ambiguous << {:name => dep_name, :namespace => temp_namespace_info[:namespace][:display_name]}
                            all_ambiguous << {:name => dep_name, :namespace => namespace_info[:namespace][:display_name]}
                          end
                          temp_existing.merge!(dep_name => branch)
                        end
                      end
                    else
                      all_inconsistent << "#{dep_name} (current:#{branch_version}, required:#{constraint_op}#{required_version})"
                      all_inconsistent_names << dep_name
                    end

                  else
                    all_possibly_missing << dep_name
                  end
                end
              else
                all_possibly_missing << dep_name
              end
            end
          end
        end

        # drop ambiguity entries already pinned by the existing module refs
        check_if_matching_or_ambiguous(module_branch, all_ambiguous)
        all_ambiguous_ns = all_ambiguous.map{|am| am[:name]} unless all_ambiguous.empty?
        unless all_ambiguous_ns.empty? || all_match_hashes.empty?
          all_ambiguous_ns.uniq!
          all_match_hashes.delete_if{|k,v|all_ambiguous_ns.include?(k)}
        end

        # group ambiguous entries as name => [namespace, ...]
        ambiguous_grouped = {}
        unless all_ambiguous.empty?
          ambiguous_g = all_ambiguous.group_by { |h| h[:name] }
          ambiguous_g.each do |k,v|
            namespaces = v.map{|a| a[:namespace]}
            ambiguous_grouped.merge!(k => namespaces)
          end
        end

        if component_module_refs = component_module_refs?(all_match_hashes)
          ret.merge!(:matching_module_refs => component_module_refs)
        end

        # a dependency counts in only one bucket; matches win over the others
        all_inconsistent = (all_inconsistent - all_match_hashes.keys)
        all_possibly_missing = (all_possibly_missing.uniq - all_inconsistent_names - all_match_hashes.keys - all_ambiguous_ns.uniq)
        ext_deps_hash = {
          :inconsistent => all_inconsistent.uniq,
          :possibly_missing => all_possibly_missing.uniq
        }
        unless ambiguous_grouped.empty?
          ext_deps_hash.merge!(:ambiguous => ambiguous_grouped)
        end
        ret.merge(:external_dependencies => ExternalDependencies.new(ext_deps_hash))
      end

      # Removes from ambiguous any entries whose name+namespace already appear
      # in the branch's existing module-refs file (mutates ambiguous in place).
      def check_if_matching_or_ambiguous(module_branch, ambiguous)
        existing_c_hash = get_existing_module_refs(module_branch)
        if existing = existing_c_hash['component_modules']
          existing.each do |k,v|
            if k && v
              amb = ambiguous.select{|a| a[:name].split('/').last.eql?(k) && a[:namespace].eql?(v['namespace'])}
              ambiguous.delete_if{|amb| amb[:name].split('/').last.eql?(k)} unless amb.empty?
            end
          end
        end
      end

      # Wraps matched branches (indexed by id) as module refs in component DSL
      # form; nil when there are no matches.
      def component_module_refs?(all_match_hashes)
        ret = nil
        return ret unless all_match_hashes
        ndx_ret = all_match_hashes.values.inject(Hash.new) do |h,r|
          h.merge(r.id() => r)
        end
        unless ndx_ret.empty?
          ModuleRefs::ComponentDSLForm.create_from_module_branches?(ndx_ret.values)
        end
      end

      # Existing module-refs meta file on the branch as a hash ({} when the
      # file is absent).
      def get_existing_module_refs(module_branch)
        existing_c_hash = {}
        existing_content = RepoManager.get_file_content({:path => ModuleRefs.meta_filename_path()}, module_branch, {:no_error_if_not_found => true})
        existing_c_hash = Aux.convert_to_hash(existing_content,:yaml) if existing_content
        existing_c_hash
      end

      # for caching info
      class ComponentModuleWrapper
        def initialize(cmp_mod)
          @cmp_mod = cmp_mod
        end

        def id()
          @cmp_mod.id()
        end

        # Memoized branch wrappers for the wrapped component module.
        def module_branches()
          @module_branches ||= @cmp_mod.get_module_branches().map{|b|Branch.new(b)}
        end

        # Lazy accessor over a branch's :external_ref metadata.
        class Branch
          attr_reader :branch

          def initialize(branch)
            @branch = branch
          end

          def has_external_ref?()
            !external_ref.nil?
          end

          # Name from external_ref, with '-' normalized to '/' (ns/module form).
          def branch_name()
            (branch_hash[:name]||'').gsub('-','/').strip()
          end

          def branch_version
            branch_hash[:version]
          end

          private

          def external_ref()
            @branch[:external_ref]
          end

          def branch_hash()
            # TODO: get rid of use of eval; for metadata source dont turn into string in first place
            # SECURITY: eval of stored metadata — safe only if external_ref is
            # always server-generated; confirm provenance
            @branch_hash ||= (external_ref && eval(external_ref))||{}
          end
        end
      end

    end # class ExternalRefs
  end # class UpdateModule
end; end
-
# This imports a single module
-
3
module DTK; class BaseModule; class UpdateModule
-
1
class Import < self
-
# IMPORT_FORM can have values:
-
# 'flat_form' put all content in top level folder (default behavior)
-
# 'puppet_folder_form' put puppet content in puppet subfolder
-
1
IMPORT_FORM = 'flat_form'
-
-
1
# base_module - module being imported into
# version     - optional module version used to pick the workspace branch
def initialize(base_module,version=nil)
  super(base_module)
  # BUGFIX: @version was never assigned (the superclass initialize sets only
  # @base_module/@module_class), so ret_local(@version) in import_from_file /
  # import_from_git always received nil
  @version = version
  @module_branch = base_module.get_workspace_module_branch(version)
end
-
-
1
# Imports a Puppet Forge module that has been copied to source_directory:
# creates the module/branch/repo objects, records external refs (with source
# info), scaffolds a DSL, parses it into model objects, and updates component
# module refs. cmr_update_els supplies refs whose component_module names are
# scaffolded as includes. Returns the new component module's id (for client).
def self.import_puppet_forge_module(project,local_params,source_directory,cmr_update_els)
  config_agent_type = :puppet
  opts_create_mod = Opts.new(
    :config_agent_type => config_agent_type,
    :copy_files => {:source_directory => source_directory},
    :no_error_if_exists => true
  )

  module_and_branch_info = ComponentModule.create_module(project,local_params,opts_create_mod)
  module_branch_idh = module_and_branch_info[:module_branch_idh]
  module_branch = module_branch_idh.create_object()
  repo_id = module_and_branch_info[:module_repo_info][:repo_id]
  repo = project.model_handle(:repo).createIDH(:id => repo_id).create_object()

  source = generate_source(local_params)
  impl_obj = Implementation.create?(project,local_params,repo,config_agent_type)
  impl_obj.create_file_assets_from_dir_els()

  # record provider metadata (e.g. from the Modulefile) plus the source,
  # or just the source when no provider metadata exists
  if external_ref = ConfigAgent.parse_provider_specific_dependencies?(config_agent_type, impl_obj)
    if content = external_ref[:content]
      content[:source] = source if source
      module_branch.update_external_ref(content)
    end
  else
    module_branch.update_external_ref(:source => source) if source
  end

  component_module = module_and_branch_info[:module_idh].create_object()

  # mark unparsed until the scaffolded DSL has been committed and parsed
  module_branch.set_dsl_parsed!(false)
  include_modules = cmr_update_els.map{|r|r.component_module}

  # scaffold Puppet manifests
  opts_scaffold = Opts.create?(
    :ret_hash_content => true,
    :include_modules? => include_modules.empty? ? nil : include_modules
  )
  dsl_created_info = ScaffoldImplementation.create_dsl(local_params.module_name(),config_agent_type,impl_obj,opts_scaffold)

  # move top level folders/files in provider subfolder
  move_content_to_provider_subdir(repo, impl_obj) if IMPORT_FORM.eql?('puppet_folder_form')

  # add dsl file and create DTK module objects from the dsl
  UpdateModule.new(component_module).add_dsl_to_impl_and_create_objects(dsl_created_info,project,impl_obj,module_branch_idh,local_params.version)

  UpdateModuleRefs.update_component_module_refs_and_save_dsl?(module_branch,cmr_update_els,component_module)

  module_branch.set_dsl_parsed!(true)
  # need component module id to be returned to client
  component_module[:id]
end
-
-
1
# Updates the model from a file-based import: pulls repo changes up to
# commit_sha, (re)creates model objects from the DSL, and refreshes component
# module refs. Returns an UpdateModuleOutput, a parsing-error object on
# DSL/module-ref errors, or an empty output when nothing needs doing.
def import_from_file(commit_sha,repo_idh,opts={})
  ret = UpdateModuleOutput.new()
  pull_was_needed = @module_branch.pull_repo_changes?(commit_sha)

  parse_needed = !@module_branch.dsl_parsed?()
  # nothing to do when the repo is current and the DSL already parsed
  return ret unless pull_was_needed or parse_needed

  repo = repo_idh.create_object()
  # NOTE(review): confirm @version is assigned by initialize — if not, this is always nil
  local = ret_local(@version)

  # TODO: provider is hardcoded to puppet until we introduce more provider types
  opts.merge!(:provider => 'puppet')
  create_info = create_needed_objects_and_dsl?(repo,local,opts)
  return create_info if create_info[:dsl_parse_error] && is_parsing_error?(create_info[:dsl_parse_error])

  ret = UpdateModuleOutput.create_from_update_create_info(create_info)
  external_deps = ret.external_dependencies()

  component_module_refs = update_component_module_refs(@module_branch, create_info[:matching_module_refs])
  return component_module_refs if is_parsing_error?(component_module_refs)

  opts_save_dsl = Opts.create?(
    :create_empty_module_refs => true,
    :component_module_refs => component_module_refs,
    :external_deps? => external_deps
  )
  if dsl_updated_info = UpdateModuleRefs.save_dsl?(@module_branch, opts_save_dsl)
    if opts[:ret_dsl_updated_info]
      ret.merge!(:dsl_updated_info => dsl_updated_info)
    end
  end

  # mark parsed only when there are no external-dependency errors
  if !external_deps.any_errors? and !opts[:dsl_parsed_false]
    @module_branch.set_dsl_parsed!(true)
  end

  ret
end
-
-
1
# Pulls repo changes pointed at by commit_sha (if needed), re-parses the module
# dsl from the implementation, updates the DTK object model (with a referential
# integrity check), and refreshes the module_refs dsl file.
#
# commit_sha - sha the client pushed; used to decide whether a pull is needed
# repo_idh   - id handle of the module's repo
# opts       - :ret_dsl_updated_info, :dsl_parsed_false, plus options passed
#              through to create_needed_objects_and_dsl? / parse_dsl
#
# Returns an UpdateModuleOutput, or a parsing-error object on failure.
def import_from_git(commit_sha,repo_idh,opts={})
  ret = UpdateModuleOutput.new()
  pull_was_needed = @module_branch.pull_repo_changes?(commit_sha)

  parse_needed = !@module_branch.dsl_parsed?()
  # nothing to do when the branch is already at commit_sha and parsed
  return ret unless pull_was_needed or parse_needed
  repo = repo_idh.create_object()
  local = ret_local(@version)

  create_info = create_needed_objects_and_dsl?(repo,local,opts)
  return create_info if create_info[:dsl_parse_error] && is_parsing_error?(create_info[:dsl_parse_error])

  version = create_info[:version] # TODO: is this right or just user @version where refer to 'version'
  impl_obj = create_info[:impl_obj]
  ret = UpdateModuleOutput.create_from_update_create_info(create_info)
  external_deps = ret.external_dependencies()

  # mark unparsed while the model update is in flight; set true again at the
  # end only when there were no external-dependency errors
  @module_branch.set_dsl_parsed!(false)

  opts_parse = {:config_agent_type => create_info[:config_agent_type]}.merge(opts)
  if dsl_created_info = ret.dsl_created_info?
    opts_parse.merge!(:dsl_created_info => dsl_created_info)
  end
  dsl_obj = parse_dsl(impl_obj,opts_parse)
  return dsl_obj if is_parsing_error?(dsl_obj)

  dsl_obj.update_model_with_ref_integrity_check(:version => version)

  component_module_refs = update_component_module_refs(@module_branch,create_info[:matching_module_refs])
  return component_module_refs if is_parsing_error?(component_module_refs)

  hash_opt_save_dsl = {
    :component_module_refs => component_module_refs,
    :create_empty_module_refs => true,
    :external_dependencies? => external_deps
  }
  dsl_updated_info = UpdateModuleRefs.save_dsl?(@module_branch, Opts.create?(hash_opt_save_dsl))
  if opts[:ret_dsl_updated_info]
    ret.merge!(:dsl_updated_info => dsl_updated_info)
  end

  if !external_deps.any_errors? and !opts[:dsl_parsed_false]
    @module_branch.set_dsl_parsed!(true)
  end

  ret
end
-
-
1
private

# True when the underlying base module's dsl has already been parsed.
def dsl_parsed?()
  @base_module.dsl_parsed?()
end

# Builds a puppetforge source uri from the local params' source name;
# returns nil when no source name is set.
def self.generate_source(local_params)
  source = local_params.source_name
  "puppetforge://#{source}" if source
end

# Relocates top-level repo content into the provider ("puppet") subdirectory.
def self.move_content_to_provider_subdir(repo, impl_obj)
  repo.update_object!(:local_dir)
  base_dir = repo[:local_dir]
  impl_obj.move_to_provider_subdir(base_dir, "#{base_dir}/puppet")
end
-
end
-
end; end; end
-
-
# This can import multiple modules; it uses Import.import_puppet_forge_module to import each module that
# needs to be installed
-
3
module DTK; class BaseModule; class UpdateModule
  # Imports a puppet forge module plus any of its dependencies that are not
  # already present in the project.
  class PuppetForge
    def initialize(project,pf_local_copy,opts={})
      @project           = project
      @pf_local_copy     = pf_local_copy
      @base_namespace    = opts[:base_namespace] || default_namespace()
      @config_agent_type = :puppet
    end

    # Imports the main module and every dependency module that is not already
    # in the project.
    # Returns a hash with keys :main_module, :installed_modules, :found_modules.
    def import_module_and_missing_dependencies()
      # cross-reference returns [missing_modules, found_modules, dependency_warnings]
      _missing, found_modules, _dep_warnings = ComponentModule.cross_reference_modules(
        Opts.new(:project_idh => @project.id_handle()),
        @pf_local_copy.module_dependencies
      )

      # modules that still need to be created from the puppet forge local copy
      needed_pf_modules = @pf_local_copy.modules(:remove => found_modules)
      installed_modules = needed_pf_modules.map { |pf_module| import_module(pf_module) }

      # response covers: what was loaded from puppet forge, what was already
      # present but needed, and any dependency warnings
      format_response(installed_modules, found_modules)
    end

    private

    def default_namespace()
      Namespace.default_namespace_name()
    end

    # Imports a single puppet forge module and records its new module id on
    # pf_module (used client-side to clone the module locally).
    def import_module(pf_module)
      module_name = pf_module.default_local_module_name
      MessageQueue.store(:info, "Parsing puppet forge module '#{module_name}' ...")

      # dependencies use their own namespace; the main module uses the base one
      namespace        = pf_module.is_dependency ? pf_module.namespace : @base_namespace
      source_directory = pf_module.path
      cmr_update_els   = component_module_refs_dsl_form_els(pf_module.dependencies)

      params_opts = {}
      params_opts.merge!(:source_name => pf_module.module_source_name) if pf_module.module_source_name
      local_params = local_params(module_name, namespace, params_opts)
      module_id = Import.import_puppet_forge_module(@project,local_params,source_directory,cmr_update_els)

      pf_module.set_id(module_id)
      pf_module
    end

    # Converts dependency objects into module-ref dsl-form elements.
    def component_module_refs_dsl_form_els(dependencies)
      els = ModuleRefs::ComponentDSLForm::Elements.new
      dependencies.each { |dep| els << ModuleRefs::ComponentDSLForm.new(dep.name,dep.namespace) }
      els
    end

    def local_params(module_name, namespace, opts={})
      ModuleBranch::Location::LocalParams::Server.new(
        :module_type => :component_module,
        :module_name => module_name,
        :version     => opts[:version],
        :namespace   => namespace,
        :source_name => opts[:source_name]
      )
    end

    # Shapes the client response; the main module is the one non-dependency
    # entry and gets the base namespace.
    def format_response(installed_modules, found_modules)
      main_module = installed_modules.find { |im| !im.is_dependency }
      main_module.namespace = @base_namespace
      {
        :main_module       => main_module.to_h,
        :installed_modules => (installed_modules - [main_module]).map { |im| im.to_h },
        :found_modules     => found_modules
      }
    end
  end
end; end; end
-
-
3
module DTK; class BaseModule; class UpdateModule
  module ScaffoldImplementation
    # Rich: DTK-1754 pass in an (optional) option that indicates the scaffolding
    # strategy; this builds in flexibility to support a number of variants in
    # how Puppet, as an example, gets mapped to a starting-point dtk.model.yaml
    # file. Initially we will have the existing strategy for the top level and
    # completely-commented-out output for the component module dependencies.
    # As we progress we can identify two pieces of info:
    # 1) what signatures get parsed (e.g., only top level puppet ones) and put in dtk
    # 2) what signatures get parsed and put in commented out

    # Parses the module implementation and generates scaffold dsl content.
    # Returns a ModuleDSLInfo::CreatedInfo with :path/:content (plus
    # :hash_content when opts[:ret_hash_content]); re-raises any ErrorUsage
    # raised during parsing after recording whatever rendered.
    def self.create_dsl(module_name,config_agent_type,impl_obj,opts={})
      created_info  = ModuleDSLInfo::CreatedInfo.new()
      parsing_error = nil
      render_hash   = nil
      begin
        impl_parse    = ConfigAgent.parse_given_module_directory(config_agent_type,impl_obj)
        dsl_generator = ModuleDSL::GenerateFromImpl.create()
        # refinement_hash is a version-neutral form obtained from the
        # version-specific dsl generator
        refinement_hash = dsl_generator.generate_refinement_hash(impl_parse,module_name,impl_obj.id_handle())
        render_hash     = refinement_hash.render_hash_form(opts)
      rescue ErrorUsage => e
        # parsing_error = ErrorUsage.new("Error parsing #{config_agent_type} files to generate meta data")
        parsing_error = e
      rescue => e
        Log.error_pp([:parsing_error,e,e.backtrace[0..10]])
        raise e
      end
      if render_hash
        format_type  = ModuleDSL.default_format_type()
        content      = render_hash.serialize(format_type)
        dsl_filename = ModuleDSL.dsl_filename(format_type)
        created_info.merge!(:path => dsl_filename, :content => content)
        created_info.merge!(:hash_content => render_hash) if opts[:ret_hash_content]
      end
      # surface a parse failure only after partial render info is recorded
      raise parsing_error if parsing_error
      created_info
    end
  end
end; end; end
-
3
module DTK; class BaseModule; class UpdateModule
  # Updates the component module_ref objects for a module branch and, when
  # needed, serializes the module_refs dsl file back to the repo.
  class UpdateModuleRefs < self

    def initialize(dsl_obj,base_module)
      super(base_module)
      @input_hash = dsl_obj.input_hash
      @project_idh = dsl_obj.project_idh
      @module_branch = dsl_obj.module_branch
    end

    # Updates the module-ref objects from parse elements and then saves the
    # module_refs dsl file.
    # opts can have keys
    #   :message
    #   :create_empty_module_refs
    #   :external_dependencies
    def self.update_component_module_refs_and_save_dsl?(module_branch,cmr_update_els,base_module,opts={})
      component_module_refs = update_component_module_refs(module_branch,cmr_update_els,base_module)
      save_dsl?(module_branch,opts.merge(:component_module_refs => component_module_refs))
    end

    def self.update_component_module_refs(module_branch,cmr_update_els,base_module)
      ModuleRefs::Parse.update_component_module_refs_from_parse_objects(base_module.class,module_branch,cmr_update_els)
    end
    def update_component_module_refs(cmr_update_els)
      self.class.update_component_module_refs(@module_branch,cmr_update_els,@base_module)
    end
    private :update_component_module_refs

    # if an update is made it returns a ModuleDSLInfo::UpdatedInfo object
    # opts can have keys
    #   :message
    #   :create_empty_module_refs
    #   :component_module_refs
    #   :external_dependencies
    def self.save_dsl?(module_branch,opts={})
      # For Rich: DTK-1925
      # think we should use the code below because when importing from file,
      # for some reason opts[:component_module_refs].component_modules will be
      # empty, while ModuleRefs.get_component_module_refs(module_branch)
      # returns valid module_refs
      #
      # this line below
      component_module_refs = opts[:component_module_refs] || ModuleRefs.get_component_module_refs(module_branch)
      # should be changed to this code:
      #   cmp_mod_refs = opts[:component_module_refs]
      #   if cmp_mod_refs && !cmp_mod_refs.component_modules.empty?
      #     component_module_refs = cmp_mod_refs
      #   else
      #     component_module_refs = ModuleRefs.get_component_module_refs(module_branch)
      #   end

      serialize_info_hash = Aux::hash_subset(opts,[:create_empty_module_refs])
      # fold ambiguous / possibly-missing external dependencies into the
      # serialize options so they render as commented-out stubs in the file
      if external_deps = opts[:external_dependencies]
        if ambiguous = external_deps.ambiguous?
          serialize_info_hash.merge!(:ambiguous => ambiguous)
        end
        if possibly_missing = external_deps.possibly_missing?
          serialize_info_hash.merge!(:possibly_missing => possibly_missing)
        end
      end
      serialize_info_hash.merge!(:create_empty_module_refs => true)
      # TODO: for efficiency if have the parsed info can pass this to serialize_and_save_to_repo?
      if new_commit_sha = component_module_refs.serialize_and_save_to_repo?(serialize_info_hash)
        msg = opts[:message]||"The module refs file was updated by the server"
        ModuleDSLInfo::UpdatedInfo.new(:msg => msg,:commit_sha => new_commit_sha)
      end
    end

    # this updates the component module objects, not the dsl
    # Returns {:external_dependencies => ExternalDependencies}, or a
    # parsing-error object when the module refs file fails to parse.
    def validate_includes_and_update_module_refs()
      ret = Hash.new
      external_deps = ExternalDependencies.new()

      include_module_names = component_module_names_in_include_statements?()
      # ModuleRefs::ComponentDSLForm will also find any parsing errors in the module refs file
      ndx_cmr_info = ModuleRefs::ComponentDSLForm.get_ndx_module_info(@project_idh,@module_class,@module_branch,:include_module_names => include_module_names)
      return ndx_cmr_info if is_parsing_error?(ndx_cmr_info)

      # process includes (if they exist)
      unless include_module_names.empty?
        # find component modules in include_module_names that are missing
        missing = include_module_names - ndx_cmr_info.keys
        external_deps.merge!(:possibly_missing => missing) unless missing.empty?

        # find any ambiguously mapped component modules (more than one
        # candidate namespace)
        ambiguous = Hash.new
        include_module_names.each do |module_name|
          if match_info = ndx_cmr_info[module_name]
            if match_info.match_type == :multiple_match
              ambiguous[module_name] = match_info.match_array.map{|cmr|cmr.namespace}
            end
          end
        end
        external_deps.merge!(:ambiguous => ambiguous) unless ambiguous.empty?
      end

      # update the component_module_ref objects from elements of ndx_cmr_info that are unique
      # cmr_update_els is set to content used to set module refs
      cmr_update_els = ModuleRefs::ComponentDSLForm::Elements.new
      ndx_cmr_info.each_value do |match_info|
        # TODO: put in explanation why matching against both :dsl and :single_match
        if [:dsl,:single_match].include?(match_info.match_type)
          # do not put in module refs if not in included modules
          if include_module_names.find{|module_name|matches_module?(match_info,module_name)}
            cmr_update_els.add!(match_info.match_array)
          end
        end
      end
      ModuleRefs::Parse.update_component_module_refs_from_parse_objects(@module_class,@module_branch,cmr_update_els)

      {:external_dependencies => external_deps}
    end

    private
    # These are modules in the component module include section of dtk.model.yaml
    def component_module_names_in_include_statements?()
      # @input_hash is in normalized form
      @input_hash.values.map{|v|(v['component_include_module']||{}).keys}.flatten(1).uniq
    end

    # True when any of match_info's candidates refer to module_name.
    def matches_module?(match_info,module_name)
      match_info.match_array.find{|r|r.component_module() == module_name}
    end
  end
end; end; end
-
-
1
module DTK
  class BaseModule < Model
    module VersionContextInfo
      # returns an array of hashes with keys :repo, :branch, :implementation
      # and, when a locked sha exists for the implementation, :sha
      def self.get_in_hash_form(components,assembly_instance)
        impls = Component::IncludeModule.get_matching_implementations(assembly_instance,components.map{ |cmp| cmp.id_handle() })
        sha_ndx = get_sha_indexed_by_impl(components)
        impls.map { |impl| hash_form(impl,sha_ndx[impl[:id]]) }
      end

      private
      def self.hash_form(impl,sha=nil)
        base = impl.hash_form_subset(:id,:repo,:branch,{:module_name=>:implementation})
        sha ? base.merge(:sha => sha) : base
      end

      # Maps implementation_id -> locked sha for the given components.
      def self.get_sha_indexed_by_impl(components)
        ndx = Hash.new
        return ndx if components.empty?
        sp_hash = {
          :cols => [:id,:group_id,:display_name,:locked_sha,:implementation_id],
          :filter => [:oneof,:id,components.map{ |cmp| cmp.id() }]
        }
        Model.get_objs(components.first.model_handle(),sp_hash).each do |row|
          if locked_sha = row[:locked_sha]
            ndx.merge!(row[:implementation_id] => locked_sha)
          end
        end
        ndx
      end
    end
  end
end
-
1
r8_require('../branch_names')
-
1
module DTK
-
1
class ModuleBranch < Model
-
1
r8_nested_require('branch','location')
-
1
include BranchNamesMixin
-
1
extend BranchNamesClassMixin
-
-
1
# Column set shared by module-branch queries.
def self.common_columns()
  [:id,:group_id,:display_name,:branch,:repo_id,:current_sha,:is_workspace,:type,:version,:ancestor_id,:external_ref,:dsl_parsed]
end

# TODO: should change type of self[:external_ref] to json
# but before check any side effect of change
# NOTE(review): eval of a db-stored string is dangerous if that column can ever
# be written from untrusted input; moving the column to json would remove this.
def external_ref()
  get_field?(:external_ref) && eval(self[:external_ref])
end

# Returns external_ref[:source] with spaces stripped, or nil.
def external_ref_source()
  ext_ref = external_ref()
  return nil unless ext_ref
  source = ext_ref[:source]
  source && source.gsub(/ /,'')
end

def get_type()
  get_field?(:type).to_sym
end

def set_dsl_parsed!(boolean_val)
  update(:dsl_parsed => boolean_val)
end

def dsl_parsed?()
  get_field?(:dsl_parsed)
end
-
-
1
# Builds a ModuleRepoInfo for this branch's repo/module/version.
def get_module_repo_info()
  repo       = get_repo(:repo_name)
  module_obj = get_module()
  info_opts  = {:version => get_field?(:version), :module_namespace => module_obj.module_namespace()}
  ModuleRepoInfo.new(repo,module_obj.module_name(),module_obj.id_handle(),self,info_opts)
end

# Returns the owning module object (looked up via this row's type column).
def get_module()
  row  = get_obj(:cols => [:type,:parent_info])
  type = row[:type].to_sym
  # TODO: temp until source of bug is found where component rather than component_module is put in for type
  if type == :component
    type = :component_module
    Log.error("Bug :component from :component_module on (#{row.inspect})")
  end
  row[type]
end

def get_module_name()
  get_module().module_name()
end
-
-
# deletes both the local model instance and the repo branch
def delete_instance_and_repo_branch()
  RepoManager.delete_branch(self)
  delete_instance(id_handle())
end

# Refreshes :current_sha from the repo head; returns the new sha.
def update_current_sha_from_repo!()
  head_sha = RepoManager.branch_head_sha(self)
  update(:current_sha => head_sha)
  self[:current_sha] = head_sha
  head_sha
end

# Persists ext_ref (stringified) and caches the object form on self.
def update_external_ref(ext_ref)
  update(:external_ref => ext_ref.to_s)
  self[:external_ref] = ext_ref
end
-
-
1
# Fast-forward merges changes from branch_name_to_merge_from into this branch
# and updates the object model: file assets always, and the parsed dsl model
# when the meta file changed. With opts[:force], a conflicting merge
# hard-resets this branch to the source branch instead of raising.
#
# Returns the module repo info hash; adds :any_updates/:fast_forward_change
# when diffs existed, and :dsl_parsing_errors when re-parsing failed (in which
# case the branch is rolled back to its previous sha).
def merge_changes_and_update_model?(component_module,branch_name_to_merge_from,opts={})
  current_sha = self[:current_sha]
  ret = get_module_repo_info()
  diffs = RepoManager.diff(branch_name_to_merge_from,self)
  diffs_summary = diffs.ret_summary()

  # TODO: in addition to :any_updates or instead can send the updated sha and have client to use that to determine if client is up to date
  return ret if diffs_summary.no_diffs?()
  ret = ret.merge!(:any_updates => true, :fast_forward_change => true)

  result = RepoManager.fast_foward_merge_from_branch(branch_name_to_merge_from,self)
  if result == :merge_needed
    if opts[:force]
      # conflicting merge: discard this branch's changes and mirror the source branch
      RepoManager.hard_reset_to_branch(branch_name_to_merge_from,self)
      ret.merge!(:fast_forward_change => false)
    else
      raise ErrorUsage.new("There is a merge conflict! Cannot push changes without using the --force option; THIS OPTION WILL WIPE OUT CHANGES IN THE BASE COMPONENT MODULE")
    end
  elsif result != :changed
    raise Error.new("Unexpected result from fast_foward_merge_from_branch")
  end

  # record the merged-to sha on the model
  self[:current_sha] = diffs.b_sha
  update(:current_sha => self[:current_sha])

  impl_obj = get_implementation()
  impl_obj.modify_file_assets(diffs_summary)

  if diffs_summary.meta_file_changed?()
    errors = ErrorUsage::Parsing.trap(:only_return_error=>true) do
      component_module.parse_dsl_and_update_model(impl_obj,id_handle(),version(),:update_module_refs_from_file => true)
    end

    if errors
      # reset base branch to previous sha
      repo = self.get_repo()
      repo.hard_reset_branch_to_sha(self, current_sha)
      self.set_sha(current_sha)

      # return parsing errors
      ret.merge!(:dsl_parsing_errors => errors)
    end
  end
  ret
end
-
-
# returns true if an actual pull was needed; nil when the branch is already
# at commit_sha
def pull_repo_changes?(commit_sha, force = false)
  update_object!(:branch,:current_sha)
  return nil if is_set_to_sha?(commit_sha)
  merge_result = RepoManager.fast_foward_pull(self[:branch], force, self)
  if merge_result == :merge_needed
    raise Error.new("Merge problem exists between multiple clients editting the module (#{get_module().pp_module_name()})")
  end
  set_sha(commit_sha)
  true
end

def is_set_to_sha?(commit_sha)
  get_field?(:current_sha) == commit_sha
end

# Persists commit_sha as the branch's current sha and returns it.
def set_sha(commit_sha)
  update(:current_sha => commit_sha)
  commit_sha
end
-
-
1
def version()
  self.class.version_from_version_field(get_field?(:version))
end

# Returns the version object when it is an assembly-module version, else nil.
def assembly_module_version?()
  v = version()
  v if v.kind_of?(ModuleVersion::AssemblyModule)
end

# Printable version; substitutes opts[:default_version_string] (may be nil)
# when this branch carries the default version.
def version_print_form(opts=Opts.new)
  default_version_string = opts[:default_version_string] # can be null
  update_object!(:version)
  has_default_version?() ? default_version_string : self[:version]
end

def matches_base_version?()
  matches_version?(BaseVersion)
end
BaseVersion = nil

def matches_version?(version=nil)
  update_object!(:version)
  self[:version] == self.class.version_field(version)
end

# Incrementally regenerates the component dsl, saves it to the repo, and
# returns the generated fragment hash.
def incrementally_update_component_dsl(augmented_objects,context={})
  dsl_path,hash_content,fragment_hash = ModuleDSL.incremental_generate(self,augmented_objects,context)
  serialize_and_save_to_repo?(dsl_path,hash_content)
  fragment_hash
end
-
-
# updates repo if any changes and if so returns new commit_sha
# args can be either (file_path, hash_content, format_type(optional), opts(optional))
# or a single element which is an array having elements with keys
# :path, :hash_content, :format_type
def serialize_and_save_to_repo?(*args)
  opts = Hash.new
  files =
    if args.size == 1
      args[0]
    else
      path,hash_content,format_type,opts = args
      format_type ||= dsl_format_type_form_path(path)
      opts ||= Hash.new
      [{:path => path,:hash_content => hash_content,:format_type => format_type}]
    end

  unless files.empty?
    ambiguous_deps = opts[:ambiguous]||[]
    missing_deps = opts[:possibly_missing]||[]
    any_changes, new_cmp_refs, valid_existing, existing_names = false, nil, nil, []
    files.each do |file_info|
      content = Aux.serialize(file_info[:hash_content],file_info[:format_type])

      # check if a module_refs file exists already
      existing_content = RepoManager.get_file_content({:path => file_info[:path]},self,{:no_error_if_not_found => true})
      file_path = file_info[:path]

      if existing_content
        existing_c_hash = Aux.convert_to_hash(existing_content,file_info[:format_type])
        if existing_c_hash && !existing_c_hash.kind_of?(ErrorUsage::Parsing) and existing_c_hash['component_modules']
          valid_existing = true
        end
      end

      # if module_refs file and content already exist then append new module_refs to existing
      if valid_existing && opts[:update_module_refs] && file_path.eql?("module_refs.#{file_info[:format_type].to_s}")
        existing_c_hash = Aux.convert_to_hash(existing_content,file_info[:format_type])
        new_cmp_refs = file_info[:hash_content].clone

        if new_cmp_refs[:component_modules] && existing_c_hash['component_modules']
          new_cmp_refs[:component_modules].merge!(existing_c_hash['component_modules'])
        end

        content = Aux.serialize(new_cmp_refs,file_info[:format_type]) if new_cmp_refs
      end

      if valid_existing
        existing_c_hash['component_modules'].each do |k,v|
          existing_names << k if v
        end
      end

      # render ambiguous dependencies as commented-out stubs, skipping ones
      # already present in the existing file
      unless ambiguous_deps.empty?
        ambiguous = process_ambiguous_dependencies(ambiguous_deps, file_info[:hash_content])
        if file_info[:hash_content].empty?
          content = ambiguous
        else
          if valid_existing
            temp_ambiguous = ambiguous_deps.clone
            temp_ambiguous.delete_if{|ad,n| existing_names.include?(ad.split('/').last)}
            ambiguous = process_ambiguous_dependencies(temp_ambiguous, file_info[:hash_content])
          end
          content << ambiguous
        end
      end

      unless missing_deps.empty?
        # Bug fix: previously passed the local `hash_content`, which is only
        # assigned in the multi-arg branch above and so is nil when args is the
        # single-array form (raising NoMethodError inside
        # process_missing_dependencies); use the per-file hash content like
        # the ambiguous-deps branch does
        missing = process_missing_dependencies(missing_deps, file_info[:hash_content])
        if file_info[:hash_content].empty?
          content = missing
        else
          if valid_existing
            temp_missing = missing_deps.clone
            temp_missing.delete_if{|md| existing_names.include?(md.split('/').last)}
            missing = process_missing_dependencies(temp_missing, file_info[:hash_content])
          end
          content << missing
        end
      end

      # nothing to write at all: emit an empty component_modules stanza unless
      # a valid file already exists
      if file_info[:hash_content].empty? && ambiguous_deps.empty? && missing_deps.empty?
        content = "---\ncomponent_modules:\n" unless valid_existing
      end

      any_change = RepoManager.add_file({:path => file_info[:path]},content,self)
      any_changes = true if any_change
    end
    if any_changes
      new_commit_sha = push_changes_to_repo()
      new_commit_sha
    end
  end
end
-
-
1
# Derives the dsl format type (:json / :yaml) from a file path's extension;
# raises Error for any other extension.
def dsl_format_type_form_path(path)
  extension = (path =~ /\.([^\.]+$)/; $1)
  format_type = FormatTypeFromExtension[extension]
  raise Error.new("Cannot find format type from file path (#{path})") unless format_type
  format_type
end
private :dsl_format_type_form_path
FormatTypeFromExtension = {
  "json" => :json,
  "yaml" => :yaml
}
-
-
1
# Pushes pending branch changes and records the resulting commit sha;
# returns that sha to the caller (via set_sha).
def push_changes_to_repo()
  set_sha(RepoManager.push_changes(self))
end
-
-
1
# Renders ambiguous dependencies as commented-out yaml, listing each candidate
# namespace so the user can pick one; prepends the document header when there
# is no other content.
def process_ambiguous_dependencies(ambiguous, hash_content)
  out = ""
  out << "---\ncomponent_modules:\n" if hash_content.empty?

  ambiguous.each do |module_name,namespaces|
    short_name = module_name.to_s.split('/').last
    out << " #{short_name}:\n"
    namespaces.each_with_index do |ns,ndx|
      out << "# namespace: #{ns}\n"
      out << "# -- OR -- \n" if ndx + 1 < namespaces.size
    end
  end

  out
end
-
-
1
# Renders possibly-missing dependencies as commented-out yaml stubs the user
# can fill in with the right namespace; prepends the document header when
# there is no other content.
def process_missing_dependencies(missing, hash_content)
  out = ""
  out << "---\ncomponent_modules:\n" if hash_content.empty?

  missing.each do |module_name|
    short_name = module_name.to_s.split('/').last
    out << "# dependency from git import: #{module_name}\n"
    out << "# #{short_name}:\n"
    out << "# namespace: NAMESPACE\n"
  end

  out
end

private :push_changes_to_repo
-
-
1
# Default dsl serialization format configured for this branch's module type.
def default_dsl_format_type()
  config_index = (get_type() == :service_module ? :service : :component)
  R8::Config[:dsl][config_index][:format_type][:default].to_sym
end
-
-
# creates if necessary a new branch from this one (so the two branches share
# history) and returns the repo for the new branch; only the repo branch is
# created here - the object model is not updated
# opts can have keys
#   :sha - sha on the base branch to branch from
def create_new_branch_from_this_branch?(project,base_repo,new_version,opts={})
  new_branch_name = Location::Server::Local::workspace_branch_name(project,new_version)
  RepoManager.add_branch_and_push?(new_branch_name,opts,self)
  repo_for_version(base_repo,new_version)
end

def repo_for_version(base_repo,version)
  # bakes in that different versions share the same git repo
  base_repo
end
-
-
1
# Returns component-module info rows (each merged with its :repo) for the
# given module branch id handles; empty array for nil/empty input.
def self.get_component_modules_info(module_branch_idhs)
  return Array.new if module_branch_idhs.nil? or module_branch_idhs.empty?
  sp_hash = {
    :cols => [:component_module_info],
    :filter => [:oneof,:id,module_branch_idhs.map{ |idh| idh.get_id() }]
  }
  mh = module_branch_idhs.first.createMH()
  get_objs(mh,sp_hash).map do |row|
    row[:component_module].merge(:repo => row[:repo])
  end
end
-
-
1
# Returns the implementation row that shares this branch's repo and branch
# name; added_cols extend the selected columns.
def get_implementation(*added_cols)
  update_object!(:repo_id,:branch)
  cols = [:id,:display_name,:repo,:branch,:group_id] + added_cols
  sp_hash = {
    :cols => cols,
    :filter => [:and,[:eq, :repo_id, self[:repo_id]],[:eq, :branch, self[:branch]]]
  }
  Model.get_obj(model_handle(:implementation),sp_hash)
end

# Returns this branch's repo row; added_cols extend the selected columns.
def get_repo(*added_cols)
  update_object!(:repo_id)
  cols = [:id,:display_name] + added_cols
  sp_hash = {
    :cols => cols,
    :filter => [:eq, :id, self[:repo_id]]
  }
  Model.get_obj(model_handle(:repo),sp_hash)
end
-
-
1
def get_service_module()
  row = get_obj(:cols => [:service_module])
  row && row[:service_module]
end

def get_assemblies()
  get_objs(:cols => [:assemblies]).map { |row| row[:component] }
end

# Module-ref rows attached to this branch.
def get_module_refs()
  sp_hash = {
    :cols => [:id, :display_name, :namespace_info],
    :filter => [:eq, :branch_id, self[:id]]
  }
  Model.get_objs(model_handle(:module_ref),sp_hash)
end
-
-
1
# Namespace info rows for the given id handles; empty hash for empty input.
def self.get_namespace_info(id_handles)
  return Hash.new if id_handles.empty?
  sp_hash = {
    :cols => [:id,:component_module_namespace_info],
    :filter => [:oneof,:id,id_handles.map{ |idh| idh.get_id }]
  }
  get_objs(id_handles.first.createMH(),sp_hash)
end

def get_namespace_info()
  get_obj(:cols => [:component_module_namespace_info])
end
-
-
1
# Returns the (deduplicated) component module branches for the given nodes.
def self.get_component_workspace_branches(node_idhs)
  sp_hash = {
    # MOD_RESTRUCT: after getting rid of lib branches might use below
    # :cols => [:id,:display_name,:component_ws_module_branches],
    :cols => [:id,:display_name,:component_module_branches], # temp; can return lib branches
    :filter => [:oneof, :id, node_idhs.map{ |idh| idh.get_id() }]
  }
  node_rows = get_objs(node_idhs.first().createMH(),sp_hash)
  # dedup module branches by id
  ndx_branches = Hash.new
  node_rows.each do |row|
    module_branch = row[:module_branch]
    ndx_branches[module_branch[:id]] ||= module_branch
  end
  ndx_branches.values
end
-
-
1
# Returns the ancestor branch object, or nil when there is no ancestor.
def get_ancestor_branch?()
  ancestor_branch_id = get_field?(:ancestor_id)
  return nil unless ancestor_branch_id
  sp_hash = {
    :cols => self.class.common_columns(),
    :filter => [:eq,:id,ancestor_branch_id]
  }
  Model.get_obj(model_handle(),sp_hash)
end
-
-
1
# Builds the create-hash (keyed by branch ref) for a new module branch row.
# opts can have keys
#   :ancestor_branch_idh
def self.ret_create_hash(repo_idh,local,opts={})
  ancestor_branch_idh = opts[:ancestor_branch_idh]
  branch = local.branch_name
  type = local.module_type.to_s
  # TODO: temp until source of bug is found where component rather than component_module is put in for type
  if type == 'component'
    type = 'component_module'
    Log.error_pp(["Bug :component from :component_module on",local,caller()[0..7]])
  end

  assigns = {
    :display_name => branch,
    :branch => branch,
    :repo_id => repo_idh.get_id(),
    :is_workspace => true,
    # Bug fix: use the corrected `type` computed above; this previously was
    # local.module_type.to_s, silently discarding the
    # component -> component_module correction (ret_workspace_create_hash
    # already uses its corrected type)
    :type => type,
    :version => version_field(local.version)
  }
  assigns.merge!(:ancestor_id => ancestor_branch_idh.get_id()) if ancestor_branch_idh
  {branch => assigns}
end
-
# TODO: ModuleBranch::Location: deprecate below in favor of ret_create_hash
def self.ret_workspace_create_hash(project,type,repo_idh,opts={})
  ancestor_branch_idh = opts[:ancestor_branch_idh]
  branch = workspace_branch_name(project,opts[:version])
  assigns = {
    :display_name => branch,
    :branch       => branch,
    :repo_id      => repo_idh.get_id(),
    :is_workspace => true,
    :type         => type,
    :version      => version_field(opts[:version])
  }
  assigns.merge!(:ancestor_id => ancestor_branch_idh.get_id()) if ancestor_branch_idh
  {branch => assigns}
end
-
-
# TODO: clean up; complication is that an augmented branch can be passed
# Returns [repo_name, branch_name] for this branch, looking up the repo row
# when it is not already attached to self.
def repo_and_branch()
  repo = self[:repo]
  update_object!(*(repo ? [:branch] : [:branch,:repo_id]))
  unless repo
    sp_hash = {
      :cols => [:id,:display_name, :repo_name],
      :filter => [:eq,:id,self[:repo_id]]
    }
    repo = Model.get_obj(model_handle(:repo),sp_hash)
  end
  [repo[:repo_name]||repo[:display_name], self[:branch]]
end
-
-
# in case we change what schema the module and branch objects live under
def self.module_id_col(module_type)
  case module_type
  when :service_module   then :service_id
  when :component_module then :component_id
  else raise Error.new("Unexected module type (#{module_type})")
  end
end

def module_id_col(module_type)
  self.class.module_id_col(module_type)
end
-
end
-
end
-
#
# Classes that encapsulate, for each module or module branch, where its local clone is and where its remotes are
#
-
1
module DTK
  class ModuleBranch
    # Encapsulates where a module branch lives: its local clone (@local) and
    # its remote (@remote), each built only when params are supplied.
    class Location
      r8_nested_require('location','params')
      # above needed before below
      r8_nested_require('location','local')
      r8_nested_require('location','remote')
      # above needed before below
      r8_nested_require('location','server')
      r8_nested_require('location','client')

      attr_reader :local, :remote
      private
      def initialize(project,local_params=nil,remote_params=nil)
        @local  = self.class::Local.new(project,local_params) if local_params
        @remote = self.class::Remote.new(project,remote_params) if remote_params
      end
    end
  end
end
-
-
-
2
module DTK; class ModuleBranch
  class Location
    # Client-side location; its local part simply specializes Location::Local.
    class Client < self
      class Local < Location::Local
      end
    end
  end
end; end
-
2
module DTK; class ModuleBranch
  class Location
    # keys: :module_type,:component_type?,:module_name,:version?,:namespace?
    class LocalParams < Params
      def component_type()
        self[:component_type]
      end

      def initialize(local_params)
        super
        @component_type = local_params[:component_type]||ret_component_type(local_params[:module_type])
      end

      class Server < self
        def create_local(project)
          Location::Server::Local.new(project,self)
        end
      end

      private

      def legal_keys()
        [:module_type,:component_type?,:module_name,:version?,:namespace?,:source_name?]
      end

      # Maps a module type to its component type; returns nil for unknown types.
      def ret_component_type(module_type)
        # Bug fix: the module_type parameter was previously ignored (the body
        # called the module_type() accessor with explicit parens); behavior is
        # unchanged at the existing call site since self[:module_type] is
        # already set by super, but the method now honors its argument
        case module_type
        when :service_module
          :service_module
        when :component_module
          :puppet # TODO: hard wired
        when :test_module
          :test # TODO: hard wired
        when :node_module
          :node_module # TODO: hard wired
        end
      end
    end

    # A LocalParams bound to a project, with memoized branch/repo names.
    class Local < LocalParams
      attr_reader :project
      def initialize(project,local_params)
        super(local_params)
        @project = project
      end
      def branch_name()
        @branch_name ||= ret_branch_name()
      end
      def private_user_repo_name()
        @private_user_repo_name ||= ret_private_user_repo_name()
      end
    end
  end
end; end
-
2
module DTK; class ModuleBranch
  class Location
    # Base class for local/remote location params: a key-validated Hash with
    # accessors for the params common to both (module_name, version, namespace,
    # module_type, source_name).
    class Params < Hash
      # Returns the module name; with opts[:with_namespace] it is prefixed
      # with the namespace (raising when no namespace is set).
      def module_name(opts={})
        ret = self[:module_name]
        if opts[:with_namespace]
          unless ns = module_namespace_name()
            raise Error.new("Unexpected that self does not have namespace set")
          end
          ret = Namespace.join_namespace(ns, ret)
        end
        ret
      end

      def module_namespace_name()
        self[:namespace]
      end

      def module_type()
        self[:module_type]
      end
      def version()
        self[:version]
      end
      def namespace()
        self[:namespace]
      end
      def source_name()
        self[:source_name]
      end

      def initialize(params)
        # only raw hashes need key validation; another Params was validated
        # when it was first built
        unless params.kind_of?(self.class)
          validate(params)
        end
        replace(params)
      end

      # Pretty-print form: "namespace:name(version)", with namespace/version
      # parts omitted when absent.
      def pp_module_name(opts={})
        ret = module_name
        if version
          # Bug fix: was `ret << "(#{version})"`, which mutated the string
          # object stored under self[:module_name]; build a new string instead
          ret = "#{ret}(#{version})"
        end

        module_namespace_name ? "#{module_namespace_name}:#{ret}" : ret
      end

      private
      # Raises Error when params contains illegal keys or misses required ones
      # (legal_keys is supplied by subclasses; a trailing '?' marks optional).
      def validate(params)
        unless (bad_keys = params.keys - all_keys()).empty?
          raise Error.new("Illegal key(s): #{bad_keys.join(',')}")
        end
        missing_required = required_keys().select{|key|params[key].nil?}
        unless missing_required.empty?
          raise Error.new("Required key(s): #{missing_required.join(',')}")
        end
      end
      def all_keys()
        legal_keys().map{|k|optional?(k)||k}
      end
      def required_keys()
        legal_keys().reject{|k|optional?(k)}
      end
      # For a key written as :foo? returns :foo (it is optional); else nil.
      def optional?(k)
        k = k.to_s
        if k =~ /\?$/
          k.gsub(/\?$/,'').to_sym
        end
      end
    end
  end
end; end
-
2
module DTK; class ModuleBranch
-
1
class Location
-
# Location parameters for a remote repo.
# keys: :module_type,:module_name,:remote_repo_base,:namespace,:version?
class RemoteParams < Params
  def remote_repo_base
    self[:remote_repo_base]
  end

  # Params flavor for the dtkn public catalog (namespace required).
  class DTKNCatalog < self
    def create_remote(project)
      Remote::DTKNCatalog.new(project, self)
    end

    private

    def legal_keys
      [:module_type, :module_name, :remote_repo_base, :namespace, :version?]
    end
  end

  # Params flavor for a tenant catalog (namespace optional).
  class TenantCatalog < self
    def create_remote(project)
      Remote::TenantCatalog.new(project, self)
    end

    private

    def legal_keys
      [:module_type, :module_name, :remote_repo_base, :namespace?, :version?]
    end
  end
end
-
-
class Remote
  # True when obj is one of the concrete remote location classes.
  def self.includes?(obj)
    obj.kind_of?(DTKNCatalog) || obj.kind_of?(TenantCatalog)
  end

  # Shared behavior for concrete Remote classes; expects the including class
  # to provide ret_branch_name / ret_remote_ref / ret_repo_url.
  module RemoteMixin
    attr_reader :project

    def initialize(project, remote_params)
      super(remote_params)
      @project = project
    end

    def branch_name
      @branch_name ||= ret_branch_name
    end

    def remote_ref
      @remote_ref ||= ret_remote_ref
    end

    def repo_url
      @repo_url ||= ret_repo_url
    end

    # One-shot assignment of the repo name; raises if already set.
    def set_repo_name!(remote_repo_name)
      if @repo_name
        raise Error.new("Not expected that @repo_name is non nil")
      end
      @repo_name = remote_repo_name
      self
    end

    # Raises unless set_repo_name! was called first.
    def repo_name
      if @repo_name.nil?
        raise Error.new("Not expected that @repo_name is nil")
      end
      @repo_name
    end
  end

  r8_nested_require('remote','dtkn_catalog')
  r8_nested_require('remote','tenant_catalog')
end
-
end
-
end; end
-
-
module DTK; class ModuleBranch; class Location
  class Remote
    # Remote location backed by the dtkn public catalog.
    class DTKNCatalog < RemoteParams::DTKNCatalog
      include RemoteMixin

      HeadBranchName = 'master'

      # Workspace branch linked to this remote location, if any.
      def get_linked_workspace_branch_obj?(module_obj)
        filter = {
          :version => version,
          :remote_namespace => namespace
        }
        module_obj.get_augmented_workspace_branch(:filter => filter)
      end

      private

      def ret_repo_url
        RepoManagerClient.repo_url_ssh_access(repo_name)
      end

      def ret_remote_ref
        "#{remote_repo_base}--#{namespace}"
      end

      # nil or 'master' maps to the head branch; otherwise "v<version>".
      def ret_branch_name
        (version.nil? || version == HeadBranchName) ? HeadBranchName : "v#{version}"
      end
    end
  end
end; end; end
-
module DTK; class ModuleBranch; class Location
  class Remote
    # Remote location backed by a tenant-specific catalog; all behavior comes
    # from RemoteMixin plus the RemoteParams::TenantCatalog key set.
    class TenantCatalog < RemoteParams::TenantCatalog
      include RemoteMixin
    end
  end
end; end; end
-
-
module DTK; class ModuleBranch
  class Location
    class Server < self
      # Passes (project, local_params, remote_params) straight through to
      # Location#initialize.
      def initialize(project, local_params=nil, remote_params=nil)
        super
      end

      class Local < Location::Local
        # Branch name used for a project workspace at the given version.
        def self.workspace_branch_name(project, version=nil)
          ret_branch_name(project, version)
        end

        # Repo name for a user's private copy: "<user>-<ns>-<module>",
        # prefixed per module type ("sm-" service modules, "tm-" test).
        def self.private_user_repo_name(username, module_type, module_name, module_namespace)
          base = "#{username}-#{module_namespace}-#{module_name}"
          case module_type
          when :service_module then "sm-#{base}"
          when :test then "tm-#{base}"
          else base
          end
        end

        private

        def ret_branch_name()
          self.class.ret_branch_name(@project, version())
        end

        def ret_private_user_repo_name()
          username = CurrentSession.new.get_username()
          namespace_name = module_namespace_name() || Namespace.default_namespace_name
          # NOTE(review): @component_type is passed where a module_type is
          # expected; values line up via LocalParams#ret_component_type — confirm.
          Local.private_user_repo_name(username, @component_type, module_name(), namespace_name)
        end

        #===== helper methods

        # "workspace-<project ref>" plus either an assembly suffix or a
        # version suffix (omitted for the default version).
        def self.ret_branch_name(project, version)
          prefix = "workspace-#{project.get_field?(:ref)}"
          if version.kind_of?(ModuleVersion::AssemblyModule)
            "#{prefix}--assembly-#{version.assembly_name}"
          else
            suffix = (version && version != VersionFieldDefault) ? "-v#{version}" : ""
            "#{prefix}#{suffix}"
          end
        end
      end
    end
  end
end; end
-
module DTK
  # Model class for component (puppet) modules.
  class ComponentModule < BaseModule
    def self.model_type
      :component_module
    end

    def self.component_type
      :puppet # hardwired
    end

    def component_type
      :puppet # hardwired
    end

    # For component modules the specific type is the config agent type itself.
    def self.module_specific_type(config_agent_type)
      config_agent_type
    end

    # DSL parser specialized for component modules.
    class DSLParser < DTK::ModuleDSLParser
      def self.module_type
        :component_module
      end

      def self.module_class
        ModuleDSL
      end
    end
  end
end
-
module DTK
  # Base class for per-module-type DSL parsers; delegates the heavy lifting
  # to the external DtkCommon::DSL parsers.
  class ModuleDSLParser
    ExtMod = ::DtkCommon::DSL
    FileParserMethods = [:generate_hash]
    DirectoryParserMethods = [:parse_directory]

    # Parses module_branch's repo contents. Returns an Output (when file_type
    # is given), a hash of file_type => Output (when file_type is nil), or the
    # ParsingError object produced by the external parser.
    def self.parse_directory(module_branch, file_type, opts={})
      repo_full_path, branch = RepoManager.repo_full_path_and_branch(module_branch)
      dir_parser = ExtMod::DirectoryParser::Git.new(module_type(), repo_full_path, branch)
      parsed_info = dir_parser.parse_directory(file_type, opts) || {}

      return parsed_info if module_class::ParsingError.is_error?(parsed_info)

      if file_type
        Output.new(file_type, parsed_info)
      else
        parsed_info.inject({}) { |h, (ft, v)| h.merge(ft => Output.new(ft, v)) }
      end
    end

    def self.default_rel_path?(file_type)
      ExtMod::DirectoryParser::Git.default_rel_path?(module_type(), file_type)
    end

    def self.generate_hash(file_type, output_array)
      ExtMod::FileParser.generate_hash(file_type, output_array)
    end

    def self.file_parser_output_array_class
      ExtMod::FileParser::OutputArray
    end

    private

    # NOTE(review): `private` has no effect on singleton (self.) methods;
    # these remain callable but are abstract and must be overridden.
    def self.module_type
      raise Error.new("Abstract method that should not be called")
    end

    def self.module_class
      raise Error.new("Abstract method that should not be called")
    end

    # Normalizes parser output into a flat array of entry hashes.
    class Output < Array
      def initialize(file_type, object)
        super()
        @file_type = file_type
        if object.kind_of?(ExtMod::FileParser::OutputArray)
          object.each { |entry| self << entry }
        elsif object.kind_of?(Hash)
          # TODO: deprecate
          object.each_pair do |component_module, info|
            self << info.merge(:component_module => component_module)
          end
        else
          raise Error.new("Not implemented yet: Output parser for #{object.class}")
        end
      end
    end
  end
end
-
module DTK
  # Hash of external-dependency analysis results, pruned to the known keys.
  class ExternalDependencies < Hash
    # Categories signalling a problem vs. plain data.
    KeysProblems = [:inconsistent, :possibly_missing, :ambiguous]
    KeysOk = [:component_module_refs]
    KeysAll = KeysProblems + KeysOk

    def initialize(hash={})
      super()
      replace(pruned_hash(hash)) unless hash.empty?
    end

    # True when any problem key actually holds data.
    def any_errors?()
      KeysProblems.any? { |key| has_data?(self[key]) }
    end

    def ambiguous?()
      self[:ambiguous]
    end

    def possibly_missing?()
      self[:possibly_missing]
    end

    # Subset of hash restricted to known keys that hold data.
    def pruned_hash(hash)
      KeysAll.each_with_object({}) do |key, acc|
        value = hash[key]
        acc[key] = value if has_data?(value)
      end
    end

    private

    # Data present: non-nil and, for arrays, non-empty.
    def has_data?(val)
      !val.nil? && (!val.kind_of?(Array) || !val.empty?)
    end
  end
end
-
-
1
r8_nested_require('mixins','remote')
-
1
r8_nested_require('mixins','create')
-
1
r8_nested_require('mixins','gitolite')
-
1
r8_nested_require('mixins','get_branch')
-
1
r8_nested_require('utils','list_method')
-
-
#
-
# Mixins agregation point, and refelected on service_module and component_module classes.
-
#
-
-
1
module DTK
-
-
#
-
# Instance Mixins
-
#
-
-
1
module ModuleMixin
-
1
include ModuleMixins::Remote::Instance
-
1
include ModuleMixins::Create::Instance
-
1
include ModuleMixins::Gitolite
-
1
include ModuleMixins::GetBranchMixin
-
-
1
def ret_clone_update_info(version=nil)
-
CloneUpdateInfo.new(self,version)
-
end
-
-
#
-
# Get full module name
-
#
-
1
def full_module_name()
-
self.class.ndx_full_module_names([id_handle]).values.first
-
end
-
-
#
-
# returns Array with: name, namespace, version
-
#
-
1
def get_basic_info(opts=Opts.new)
-
sp_hash = {
-
:cols => [:id, :display_name, :version, :remote_repos],
-
:filter => [:eq,:id, id()]
-
}
-
-
rows = get_objs(sp_hash)
-
unless match = GetBasicInfo.find_match(rows,opts)
-
raise Error.new("Unexpected that there is no info associated with module")
-
end
-
match
-
end
-
-
# Helpers for selecting a single module row and projecting it to
# [name, namespace, version].
module GetBasicInfo
  # Picks the matching row — the only row, or among several the one matching
  # opts[:remote_namespace] (or the default remote) — and returns
  # [display_name, remote_namespace, version]; nil when nothing matches.
  def self.find_match(rows, opts)
    wanted_namespace = opts[:remote_namespace]
    match =
      case rows.size
      when 0 then nil
      when 1 then rows.first
      else rows.find { |row| remote_namespace_match?(row, wanted_namespace) }
      end
    name_namespace_version(match) if match
  end

  private

  # NOTE: `private` does not affect singleton methods; kept for readability.
  def self.name_namespace_version(row)
    [row[:display_name], remote_namespace(row), (row[:module_branch] || {})[:version]]
  end

  def self.remote_namespace_match?(row, wanted_namespace=nil)
    if wanted_namespace
      remote_namespace(row) == wanted_namespace
    else
      repo_remote(row)[:is_default]
    end
  end

  def self.repo_remote(row)
    row[:repo_remote] || {}
  end

  def self.remote_namespace(row)
    repo_remote(row)[:repo_namespace]
  end
end
-
-
##
-
# Returns local and remote versions for module
-
#
-
-
1
def local_and_remote_versions(client_rsa_pub_key = nil, opts={})
-
Log.error("TODO: see if namespace treatment must be updated")
-
module_name, remote_versions = nil, []
-
-
# get local versions list
-
local_versions = get_objs(:cols => [:version_info]).map do |r|
-
v = r[:module_branch].version()
-
v.nil? ? "CURRENT" : v
-
end
-
# get all remote modules versions, and take only versions for current component module name
-
info = self.class.info(model_handle(), id(), opts)
-
module_name = info[:remote_repos].first[:repo_name].gsub(/\*/,'').strip() unless info[:remote_repos].empty?
-
remote_versions = self.class.list_remotes(model_handle, client_rsa_pub_key).select{|r|r[:display_name]==module_name}.collect{|v_remote| ModuleBranch.version_from_version_field(v_remote[:versions])}.map!{|v| v.nil? ? "CURRENT" : v} if module_name
-
-
local_hash = {:namespace => "local", :versions => local_versions.flatten}
-
remote_hash = {:namespace => "remote", :versions => remote_versions}
-
-
versions = [local_hash]
-
versions << remote_hash unless remote_versions.empty?
-
-
versions
-
end
-
-
1
def get_linked_remote_repos(opts={})
-
(get_augmented_workspace_branch(opts.merge(:include_repo_remotes => true))||{})[:repo_remotes]||[]
-
end
-
-
1
def default_linked_remote_repo()
-
get_linked_remote_repos(:is_default => true).first
-
end
-
-
1
def update_model_from_clone_changes?(commit_sha,diffs_summary,version,opts={})
-
# do pull and see if any changes need the model to be updated
-
force = opts[:force]
-
module_branch = get_workspace_module_branch(version)
-
pull_was_needed = module_branch.pull_repo_changes?(commit_sha, force)
-
-
parse_needed = (opts[:force_parse] or !module_branch.dsl_parsed?())
-
update_from_includes = opts[:update_from_includes]
-
return unless pull_was_needed or parse_needed or update_from_includes
-
-
opts_update = Aux.hash_subset(opts,[:do_not_raise,:modification_type,:force_parse,:auto_update_module_refs,:dsl_parsed_false,:update_module_refs_from_file,:update_from_includes,:current_branch_sha,:service_instance_module,:task_action])
-
update_model_from_clone_changes(commit_sha,diffs_summary,module_branch,version,opts_update)
-
end
-
-
1
def get_project()
-
# caching
-
return self[:project] if self[:project]
-
update_object!(:project_project_id,:display_name) #including :display_name is opportunistic
-
if project_id = self[:project_project_id]
-
self[:project] = id_handle(:model_name => :project, :id => project_id).create_object()
-
end
-
end
-
-
# TODO: ModuleBranch::Location: need to parameterize this on branch
# Returns the single repo this module maps to; raises when the count is not
# exactly one.
def get_repo!()
  repos = get_repos()
  unless repos.size == 1
    raise Error.new("unexpected that number of matching repos is not equal to 1")
  end
  repos.first
end
-
-
1
def get_repos()
-
get_objs_uniq(:repos)
-
end
-
-
1
def get_implementations()
-
1
get_objs_uniq(:implementations)
-
end
-
-
1
def module_type()
-
self.class.module_type()
-
end
-
-
1
def module_name()
-
36
get_field?(:display_name)
-
end
-
-
1
def module_namespace()
-
get_field?(:namespace)[:display_name]
-
end
-
-
1
def module_namespace_obj()
-
get_field?(:namespace)
-
end
-
-
1
def pp_module_name(version=nil)
-
self.class.pp_module_name(module_name(),version)
-
end
-
-
1
def pp_module_branch_name(module_branch)
-
module_branch.update_object!(:version)
-
version = (module_branch.has_default_version?() ? nil : module_branch[:version])
-
self.class.pp_module_name(module_name(),version)
-
end
-
-
1
def set_dsl_parsed!(boolean_val)
-
update(:dsl_parsed => boolean_val)
-
end
-
-
1
def dsl_parsed?()
-
get_field?(:dsl_parsed)
-
end
-
-
# Collapses raw module rows — assumed to agree on everything except
# :repo_remote — into one row carrying a :repo_remotes array, optionally
# filtered by opts[:filter][:remote_namespace]. NOTE: mutates the input rows
# (deletes each row's :repo_remote entry). Raises when given no rows; returns
# nil when a namespace filter is set and nothing matched it.
def aggregate_by_remote_namespace(raw_module_rows, opts={})
  if raw_module_rows.empty?
    raise Error.new("Unexepected that raw_module_rows is empty")
  end
  wanted_namespace = (opts[:filter] || {})[:remote_namespace]

  repo_remotes = raw_module_rows.map do |row|
    repo_remote = row.delete(:repo_remote)
    next unless repo_remote
    repo_remote if wanted_namespace.nil? || wanted_namespace == repo_remote[:repo_namespace]
  end.compact

  # If filtering by namespace and nothing matched, signal "no match".
  # TODO: should we return nil when just repo_remotes.empty?
  return nil if wanted_namespace && repo_remotes.empty?

  raw_module_rows.first.merge(:repo_remotes => repo_remotes)
end
-
end
-
-
#
-
# Class Mixins
-
#
-
1
module ModuleClassMixin
-
-
1
include ModuleMixins::Remote::Class
-
1
include ModuleMixins::Create::Class
-
1
include ModuleMixins::GetBranchClassMixin
-
-
1
def component_type()
-
Log.info_pp(["#TODO: ModuleBranch::Location: deprecate for this being in ModuleBranch::Location local params",caller[0..4]])
-
case module_type()
-
when :service_module
-
:service_module
-
when :component_module
-
:puppet #TODO: hard wired
-
when :test_module
-
:puppet #TODO: hard wired
-
when :node_module
-
:puppet #TODO: hard wired
-
end
-
end
-
-
1
def module_type()
-
5
model_name()
-
end
-
-
1
def check_valid_id(model_handle,id)
-
check_valid_id_default(model_handle,id)
-
end
-
-
1
def name_to_id(model_handle,name_or_full_module_name,namespace=nil)
-
1
namespace_x, name = Namespace.full_module_name_parts?(name_or_full_module_name)
-
1
unless namespace ||= namespace_x
-
raise ErrorUsage.new("Cannot find namespace!")
-
end
-
-
1
namespace_obj = Namespace.find_by_name(model_handle.createMH(:namespace), namespace)
-
1
raise ErrorUsage.new("Namespace (#{namespace_x}) does not exist!") unless namespace_obj
-
-
1
sp_hash = {
-
:cols => [:id],
-
:filter => [:and,[:eq, :namespace_id, namespace_obj.id],[:eq, :display_name, name]]
-
}
-
1
name_to_id_helper(model_handle,name,sp_hash)
-
end
-
-
# arguments are module idhs
-
1
def ndx_full_module_names(idhs)
-
ret = Hash.new
-
return ret if idhs.empty?
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:namespace],
-
:filter => [:oneof, :id,idhs.map{|idh|idh.get_id()}]
-
}
-
mh = idhs.first.createMH()
-
get_objs(mh,sp_hash).inject(Hash.new) do |h,row|
-
namespace = row[:namespace]
-
module_name = row[:display_name]
-
full_module_name = (namespace ? Namespace.join_namespace(namespace[:display_name], module_name) : module_name)
-
h.merge(row[:id] => full_module_name)
-
end
-
end
-
-
1
def info(target_mh, id, opts={})
-
remote_repo_cols = [:id, :display_name, :version, :remote_repos, :dsl_parsed]
-
components_cols = [:id, :display_name, :version, :dsl_parsed]
-
project_idh = opts[:project_idh]
-
namespaces = []
-
-
sp_hash = {
-
:cols => remote_repo_cols,
-
:filter => [:eq,:id,id]
-
}
-
-
response = get_objs(target_mh, sp_hash.merge(opts))
-
-
# if there are no remotes just get component info
-
if response.empty?
-
sp_hash[:cols] = components_cols
-
response = get_objs(target_mh, sp_hash.merge(opts))
-
else
-
# we sort in ascending order, last remote is default one
-
# TODO: need to make more sophisticated so we dont end up comparing a '' to a date
-
response.sort { |a,b| ((b[:repo_remote]||{})[:created_at]||'') <=> ((a[:repo_remote]||{})[:created_at]||'')}
-
-
# we switch to ascending order
-
response.each_with_index do |e,i|
-
display_name = (e[:repo_remote]||{})[:display_name]
-
prefix = ( i == 0 ? "*" : " ")
-
namespaces << { :repo_name => "#{prefix} #{display_name}" }
-
end
-
end
-
-
filter_list!(response) if respond_to?(:filter_list!)
-
response.each{|r|r.merge!(:type => r.component_type()) if r.respond_to?(:component_type)}
-
response = ModuleUtils::ListMethod.aggregate_detail(response,project_idh,model_type(),Opts.new(:include_versions => true))
-
-
ret = response.first || {}
-
ret[:versions] = "CURRENT" unless ret[:versions]
-
ret.delete_if { |k,v| [:repo,:module_branch,:repo_remote].include?(k) }
-
# [Haris] Due to join condition with module.branch we can have situations where we have many versions
-
# of module with same remote branch, with 'uniq' we iron that out
-
-
ret.merge!(:remote_repos => namespaces.uniq ) if namespaces
-
ret
-
end
-
-
-
1
def list(opts=opts.new)
-
3
diff = opts[:diff]
-
3
namespace = opts[:namespace]
-
3
project_idh = opts.required(:project_idh)
-
3
remote_repo_base = opts[:remote_repo_base]
-
3
include_remotes = opts.array(:detail_to_include).include?(:remotes)
-
3
include_versions = opts.array(:detail_to_include).include?(:versions)
-
3
include_any_detail = ((include_remotes or include_versions) ? true : nil)
-
-
# cols = [:id, :display_name, :namespace_id, :dsl_parsed, :namespace, include_any_detail && :module_branches_with_repos].compact
-
3
cols = [:id, :display_name, :namespace_id, :namespace, include_any_detail && :module_branches_with_repos].compact
-
3
unsorted_ret = get_all(project_idh,cols)
-
3
unless include_versions
-
# prune all but the base module branch
-
107
unsorted_ret.reject!{|r| r[:module_branch] and r[:module_branch][:version] != ModuleBranch.version_field_default()}
-
end
-
-
# if namespace provided with list command filter before aggregating details
-
3
unsorted_ret = filter_by_namespace(unsorted_ret,namespace) if namespace
-
-
3
filter_list!(unsorted_ret) if respond_to?(:filter_list!)
-
3
unsorted_ret.each do |r|
-
104
r.merge!(:type => r.component_type()) if r.respond_to?(:component_type)
-
-
104
if r[:namespace]
-
104
r[:display_name] = Namespace.join_namespace(r[:namespace][:display_name], r[:display_name])
-
end
-
-
104
r[:dsl_parsed] = r[:module_branch][:dsl_parsed] if r[:module_branch]
-
end
-
-
3
if include_any_detail
-
1
opts_aggr = Opts.new(
-
:include_remotes => include_remotes,
-
:include_versions => include_versions,
-
:remote_repo_base => remote_repo_base,
-
:diff => diff
-
)
-
1
unsorted_ret = ModuleUtils::ListMethod.aggregate_detail(unsorted_ret,project_idh,model_type(),opts_aggr)
-
end
-
-
440
unsorted_ret.sort{|a,b|a[:display_name] <=> b[:display_name]}
-
end
-
-
1
def get_all(project_idh,cols=nil)
-
3
get_all_with_filter(project_idh,:cols => cols)
-
end
-
-
1
def get_all_with_filter(project_idh,opts={})
-
3
filter = [:eq, :project_project_id, project_idh.get_id()]
-
3
if opts[:filter]
-
filter = [:and,filter,opts[:filter]]
-
end
-
3
sp_hash = {
-
:cols => add_default_cols?(opts[:cols]),
-
:filter => filter
-
}
-
3
mh = project_idh.createMH(model_type())
-
3
get_objs(mh,sp_hash)
-
end
-
-
# Restricts object_list to entries in the given namespace. Local modules
# carry a namespace object compared by name; other entries are matched on a
# "namespace/" prefix in :display_name. nil/blank namespace returns the list
# unchanged.
def filter_by_namespace(object_list, namespace)
  return object_list if namespace.nil? || namespace.strip.empty?

  object_list.select do |entry|
    if ns_obj = entry[:namespace]
      # local modules have a namespace object
      ns_obj[:display_name] == namespace
    else
      entry[:display_name].match(/#{namespace}\//)
    end
  end
end
-
-
1
def add_user_direct_access(model_handle,rsa_pub_key,username=nil)
-
repo_user,match = RepoUser.add_repo_user?(:client, model_handle.createMH(:repo_user),{:public => rsa_pub_key},username)
-
model_name = model_handle[:model_name]
-
-
repo_user.update_direct_access(model_name,true)
-
repos = get_all_repos(model_handle)
-
unless repos.empty?
-
repo_names = repos.map{|r|r[:repo_name]}
-
RepoManager.set_user_rights_in_repos(repo_user[:username],repo_names,DefaultAccessRights)
-
-
repos.map{|repo|RepoUserAcl.update_model(repo,repo_user,DefaultAccessRights)}
-
end
-
return match, repo_user
-
end
-
-
1
DefaultAccessRights = "RW+"
-
-
1
def remove_user_direct_access(model_handle, username)
-
repo_user = RepoUser.get_matching_repo_user(model_handle.createMH(:repo_user),:username => username)
-
raise ErrorUsage.new("User '#{username}' does not exist") unless repo_user
-
# return unless repo_user
-
-
model_name = model_handle[:model_name]
-
return unless repo_user.has_direct_access?(model_name)
-
-
# confusing since it is going to gitolite
-
RepoManager.delete_user(username)
-
-
repos = get_all_repos(model_handle)
-
unless repos.empty?
-
repo_names = repos.map{|r|r[:repo_name]}
-
RepoManager.remove_user_rights_in_repos(username,repo_names)
-
# repo user acls deleted by foriegn key cascade
-
end
-
-
if repo_user.any_direct_access_except?(model_name)
-
repo_user.update_direct_access(model_name,false)
-
else
-
delete_instance(repo_user.id_handle())
-
end
-
end
-
-
1
def module_repo_info(repo,module_and_branch_info,opts={})
-
info = module_and_branch_info #for succinctness
-
branch_obj = info[:module_branch_idh].create_object()
-
ModuleRepoInfo.new(repo,info[:module_name],info[:module_idh],branch_obj,opts)
-
end
-
-
# can be overwritten
-
# TODO: ModuleBranch::Location: deprecate
-
1
def module_specific_type(config_agent_type)
-
module_type()
-
end
-
1
private :module_specific_type
-
-
# Pretty-print helper: "name (version)" when a version is given, else name.
def pp_module_name(module_name, version=nil)
  return module_name unless version
  "#{module_name} (#{version})"
end
-
-
1
def if_module_exists!(project_idh, module_name, module_namespace, error_message)
-
module_obj = module_exists?(project_idh, module_name, module_namespace)
-
-
if module_obj
-
raise ErrorUsage.new(error_message)
-
end
-
-
false
-
end
-
-
1
def module_exists?(project_idh, module_name, module_namespace)
-
unless project_idh[:model_name] == :project
-
raise Error.new("MOD_RESTRUCT: module_exists? should take a project, not a (#{project_idh[:model_name]})")
-
end
-
-
namespace_obj = Namespace.find_or_create(project_idh.createMH(:namespace), module_namespace)
-
-
sp_hash = {
-
:cols => [:id, :display_name, :dsl_parsed],
-
:filter => [ :and,
-
[:eq, :project_project_id, project_idh.get_id()],
-
[:eq, :display_name, module_name],
-
[:eq, :namespace_id, namespace_obj.id()]
-
]
-
}
-
-
get_obj(project_idh.createMH(model_name()),sp_hash)
-
end
-
-
1
private
-
1
def get_all_repos(mh)
-
get_objs(mh,{:cols => [:repos]}).inject(Hash.new) do |h,r|
-
repo = r[:repo]
-
h[repo[:id]] ||= repo
-
h
-
end.values
-
end
-
end
-
-
1
class ModuleRepoInfo < Hash
-
1
def initialize(repo,module_name,module_idh,branch_obj,opts={})
-
super()
-
repo_name = repo.get_field?(:repo_name)
-
module_namespace = opts[:module_namespace]
-
full_module_name = module_namespace ? Namespace.join_namespace(module_namespace, module_name) : nil
-
hash = {
-
:repo_id => repo[:id],
-
:repo_name => repo_name,
-
:module_id => module_idh.get_id(),
-
:module_name => module_name,
-
:module_namespace => module_namespace,
-
:full_module_name => full_module_name,
-
:module_branch_idh => branch_obj.id_handle(),
-
:repo_url => RepoManager.repo_url(repo_name),
-
:workspace_branch => branch_obj.get_field?(:branch),
-
:branch_head_sha => RepoManager.branch_head_sha(branch_obj)
-
}
-
if version = opts[:version]
-
hash.merge!(:version => version)
-
if assembly_name = version.respond_to?(:assembly_name) && version.assembly_name()
-
hash.merge!(:assembly_name => assembly_name)
-
end
-
end
-
replace(hash)
-
end
-
end
-
-
1
class CloneUpdateInfo < ModuleRepoInfo
-
1
def initialize(module_obj,version=nil)
-
aug_branch = module_obj.get_augmented_workspace_branch(:filter => {:version => version})
-
opts = {:version => version, :module_namespace => module_obj.module_namespace()}
-
super(aug_branch[:repo],aug_branch[:module_name],module_obj.id_handle(),aug_branch,opts)
-
self[:commit_sha] = aug_branch[:current_sha]
-
end
-
end
-
end
-
2
module DTK; module ModuleMixins
-
1
module Create
-
end
-
1
module Create::Class
-
# opts has key :config_agent_type
-
1
def create_module(project,local_params,opts={})
-
local = local_params.create_local(project)
-
namespace = local_params.namespace
-
module_name = local_params.module_name
-
project_idh = project.id_handle()
-
-
module_exists = module_exists?(project_idh, module_name, namespace)
-
if module_exists and not opts[:no_error_if_exists]
-
full_module_name = Namespace.join_namespace(namespace,module_name)
-
raise ErrorUsage.new("Module (#{full_module_name}) cannot be created since it exists already")
-
end
-
-
create_opts = {
-
:create_branch => local.branch_name(),
-
:push_created_branch => true,
-
:donot_create_master_branch => true,
-
:delete_if_exists => true,
-
:namespace_name => namespace
-
}
-
if copy_files_info = opts[:copy_files]
-
create_opts.merge!(:copy_files => copy_files_info)
-
end
-
repo_user_acls = RepoUser.authorized_users_acls(project_idh)
-
local_repo_obj = Repo::WithBranch.create_workspace_repo(project_idh,local,repo_user_acls,create_opts)
-
-
repo_idh = local_repo_obj.id_handle()
-
module_and_branch_info = create_module_and_branch_obj?(project,repo_idh,local)
-
-
opts_info = { :version=>local.version, :module_namespace => local.namespace }
-
module_and_branch_info.merge(:module_repo_info => module_repo_info(local_repo_obj,module_and_branch_info,opts_info))
-
end
-
-
1
def create_module_and_branch_obj?(project,repo_idh,local,ancestor_branch_idh=nil)
-
project_idh = project.id_handle()
-
module_name = local.module_name
-
namespace = Namespace.find_or_create(project.model_handle(:namespace), local.module_namespace_name)
-
ref = local.module_name(:with_namespace=>true)
-
opts = Hash.new
-
opts.merge!(:ancestor_branch_idh => ancestor_branch_idh) if ancestor_branch_idh
-
mb_create_hash = ModuleBranch.ret_create_hash(repo_idh,local,opts)
-
version_field = mb_create_hash.values.first[:version]
-
-
# create module and branch (if needed)
-
fields = {
-
:display_name => module_name,
-
:module_branch => mb_create_hash,
-
:namespace_id => namespace.id()
-
}
-
-
create_hash = {
-
model_name.to_s() => {
-
ref => fields
-
}
-
}
-
input_hash_content_into_model(project_idh,create_hash)
-
-
module_branch = get_module_branch_from_local(local)
-
module_idh = project_idh.createIDH(:model_name => model_name(),:id => module_branch[:module_id])
-
# TODO: ModuleBranch::Location: see if after refactor version field needed
-
# TODO: ModuleBranch::Location: ones that come from local can be omitted
-
{:version => version_field, :module_name => module_name, :module_idh => module_idh,:module_branch_idh => module_branch.id_handle()}
-
end
-
-
# TODO: ModuleBranch::Location: deprecate below for above
-
1
def create_ws_module_and_branch_obj?(project, repo_idh, module_name, input_version, namespace, ancestor_branch_idh=nil)
-
project_idh = project.id_handle()
-
-
ref = Namespace.join_namespace(namespace.display_name(),module_name)
-
module_type = model_name.to_s
-
opts = {:version => input_version}
-
opts.merge!(:ancestor_branch_idh => ancestor_branch_idh) if ancestor_branch_idh
-
mb_create_hash = ModuleBranch.ret_workspace_create_hash(project,module_type,repo_idh,opts)
-
version = mb_create_hash.values.first[:version]
-
-
fields = {
-
:display_name => module_name,
-
:module_branch => mb_create_hash,
-
:namespace_id => namespace.id()
-
}
-
-
create_hash = {
-
model_name.to_s => {
-
ref => fields
-
}
-
}
-
-
input_hash_content_into_model(project_idh,create_hash)
-
-
module_branch = get_workspace_module_branch(project,module_name,version,namespace)
-
module_idh = project_idh.createIDH(:model_name => model_name(),:id => module_branch[:module_id])
-
{:version => version, :module_name => module_name, :module_idh => module_idh,:module_branch_idh => module_branch.id_handle()}
-
end
-
end
-
-
1
module Create::Instance
-
# returns new module branch
-
1
def create_new_version(base_version,new_version,opts={})
-
unless aug_base_branch = get_augmented_workspace_branch(Opts.new(:filter => {:version => base_version}))
-
raise ErrorUsage.new("There is no module (#{pp_module_name()}) in the workspace")
-
end
-
-
# make sure there is a not an existing branch that matches the new one
-
if get_module_branch_matching_version(new_version)
-
raise ErrorUsage.new("Version exists already for module (#{pp_module_name(new_version)})")
-
end
-
opts_create_new_branch = Aux.hash_subset(opts,[:sha])
-
repo_for_new_version = aug_base_branch.create_new_branch_from_this_branch?(get_project(),aug_base_branch[:repo],new_version,opts_create_new_branch)
-
opts_type_spec = opts.merge(:ancestor_branch_idh => aug_base_branch.id_handle())
-
new_branch = create_new_version__type_specific(repo_for_new_version,new_version,opts_type_spec)
-
ModuleRefs.clone_component_module_refs(aug_base_branch,new_branch)
-
new_branch
-
end
-
end
-
end; end
-
1
module DTK
-
1
module ModuleMixins
-
1
module GetBranchMixin
-
1
def get_module_branch_from_local_params(local_params)
-
self.class.get_module_branch_from_local(local_params.create_local(get_project()))
-
end
-
-
1
def get_module_branches()
-
get_objs_helper(:module_branches,:module_branch)
-
end
-
-
1
def get_module_branch_matching_version(version=nil)
-
get_module_branches().find{|mb|mb.matches_version?(version)}
-
end
-
-
1
def get_workspace_repo(version=nil)
-
aug_branch = get_augmented_workspace_branch(:filter => {:version => version})
-
aug_branch[:repo]
-
end
-
-
1
def get_workspace_branch_info(version=nil,opts={})
-
if aug_branch = get_augmented_workspace_branch({:filter => {:version => version}}.merge(opts))
-
module_name = aug_branch[:module_name]
-
module_namespace = aug_branch[:module_namespace]
-
opts_info = {:version=>version, :module_namespace=>module_namespace}
-
ModuleRepoInfo.new(aug_branch[:repo],module_name,id_handle(),aug_branch,opts_info)
-
end
-
end
-
-
1
def get_augmented_workspace_branch(opts={})
-
version = (opts[:filter]||{})[:version]
-
version_field = ModuleBranch.version_field(version) #version can be nil
-
sp_hash = {
-
:cols => [:display_name,:workspace_info_full,:namespace]
-
}
-
module_rows = get_objs(sp_hash).select do |r|
-
r[:module_branch][:version] == version_field
-
end
-
-
if module_rows.size == 0
-
unless opts[:donot_raise_error]
-
raise ErrorUsage.new("Module #{pp_module_name(version)} does not exist")
-
end
-
return nil
-
end
-
-
# aggregate by remote_namespace, filtering by remote_namespace if remote_namespace is given
-
unless module_obj = aggregate_by_remote_namespace(module_rows,opts)
-
raise ErrorUsage.new("The module (#{pp_module_name(version)}) is not tied to namespace '#{opts[:filter][:remote_namespace]}' on the repo manager")
-
end
-
-
ret = module_obj[:module_branch].merge(:repo => module_obj[:repo],:module_name => module_obj[:display_name], :module_namespace => module_obj[:namespace][:display_name])
-
if opts[:include_repo_remotes]
-
ret.merge!(:repo_remotes => module_obj[:repo_remotes])
-
end
-
ret
-
end
-
-
# TODO: :library call should be deprecated
-
# type is :library or :workspace
-
1
def find_branch(type,branches)
-
matches =
-
case type
-
when :library then branches.reject{|r|r[:is_workspace]}
-
when :workspace then branches.select{|r|r[:is_workspace]}
-
else raise Error.new("Unexpected type (#{type})")
-
end
-
if matches.size > 1
-
Error.new("Unexpected that there is more than one matching #{type} branches")
-
end
-
matches.first
-
end
-
-
-
#
-
# Returns ModuleBranch object for given version
-
#
-
1
#
# Returns ModuleBranch object for given version
#
def get_workspace_module_branch(version=nil)
  branch_mh = model_handle().create_childMH(:module_branch)
  filter = [:and,[:eq,branch_mh.parent_id_field_name(),id()],
            [:eq,:is_workspace,true],
            [:eq,:version,ModuleBranch.version_field(version)]]
  Model.get_obj(branch_mh, :cols => ModuleBranch.common_columns(), :filter => filter)
end
-
# MOD_RESTRUCT: may replace below with above
-
1
# MOD_RESTRUCT: may replace below with above
# Returns the module branch whose :branch name equals +branch+, or nil.
def get_module_branch(branch)
  all_branches = get_objs(:cols => [:module_branches]).map{|row|row[:module_branch]}
  all_branches.find{|mb|mb[:branch] == branch}
end
-
end
-
-
1
# Class-level helpers for locating module branches by project/namespace/branch.
module GetBranchClassMixin
  # Resolves the module branch described by +local+ (ModuleBranch::Location-style
  # object). Returns nil when nothing matches; raises Error on ambiguity.
  def get_module_branch_from_local(local)
    project_idh = local.project().id_handle()
    module_match_filter =
      if local_namespace = local.module_namespace_name()
        [:eq, :ref, Namespace.module_ref_field(local.module_name(),local_namespace)]
      else
        [:eq, :display_name, local.module_name]
      end
    filter = [:and, module_match_filter, [:eq, :project_project_id, project_idh.get_id()]]
    wanted_branch = local.branch_name()
    matches = get_matching_module_branches(project_idh,filter,proc{|mb|mb[:branch] == wanted_branch})
    if matches.size > 1
      raise Error.new("Matched rows has unexpected size (#{matches.size}) since its is >1")
    end
    # nil when empty, the single match otherwise
    matches.first
  end

  # TODO: ModuleBranch::Location: deprecate below for above
  def get_workspace_module_branch(project,module_name,version=nil,namespace=nil,opts={})
    project_idh = project.id_handle()
    filter = [:and, [:eq, :display_name, module_name], [:eq, :project_project_id, project_idh.get_id()]]
    filter = filter.push([:eq, :namespace_id, namespace.id()]) if namespace
    wanted_branch = ModuleBranch.workspace_branch_name(project,version)
    matches = get_matching_module_branches(project_idh,filter,proc{|mb|mb[:branch] == wanted_branch},opts)
    if matches.size > 1
      Log.error_pp(["Matched rows:",matches])
      raise Error.new("Matched rows has unexpected size (#{matches.size}) since its is >1")
    end
    matches.first
  end

  # Workspace (non assembly-module) branches for the given module id handles.
  def get_workspace_module_branches(module_idhs)
    return Array.new if module_idhs.empty?
    mh = module_idhs.first.createMH()
    filter = [:oneof,:id,module_idhs.map{|idh|idh.get_id()}]
    get_matching_module_branches(mh,filter,proc{|mb|!mb.assembly_module_version?()})
  end

  # Shared lookup: fetches module rows matching +filter+, flattens to their
  # branches (tagged with :module_id) and applies +post_filter+ when given.
  # Raises ErrorUsage when nothing matches, unless opts[:no_error_if_does_not_exist].
  def get_matching_module_branches(mh_or_idh,filter,post_filter=nil,opts={})
    sp_hash = {
      :cols => [:id,:display_name,:group_id,:module_branches],
      :filter => filter
    }
    branch_rows = get_objs(mh_or_idh.create_childMH(module_type()),sp_hash).map do |row|
      row[:module_branch].merge(:module_id => row[:id])
    end
    if branch_rows.empty?
      return Array.new if opts[:no_error_if_does_not_exist]
      raise ErrorUsage.new("Module does not exist")
    end
    post_filter ? branch_rows.select{|row|post_filter.call(row)} : branch_rows
  end
end
-
end
-
end
-
-
-
-
#
-
# All interaction that streamlines interaction with gitolite for modules should be inserted here. Main purpose is grouping
-
# logic for gitolite interaction in here.
-
#
-
-
1
module DTK
  module ModuleMixins
    # Gitolite-facing helpers for module repos.
    module Gitolite
      # Reads +rel_file_path+ from the repo/branch backing +module_branch+
      # and returns its content.
      def repo_file_content(module_branch, rel_file_path)
        repo_full_path, branch = RepoManager.repo_full_path_and_branch(module_branch)
        dir_parser = ::DtkCommon::DSL::DirectoryParser::Git.new(self.module_type(), repo_full_path, branch)
        # Fix: result was previously assigned to an unused local; return it directly.
        dir_parser.file_content(rel_file_path)
      end
    end
  end
end
-
2
module DTK; module ModuleMixins
-
1
module Remote
-
end
-
-
1
module Remote::Class
-
# install from a dtkn repo; directly in this method handles the module/branch and repo level items
-
# and then calls install__process_dsl to handle model and implementaion/files parts depending on what type of module it is
-
1
# install from a dtkn repo; directly in this method handles the module/branch and repo level items
# and then calls install__process_dsl to handle model and implementation/files parts depending on
# what type of module it is.
def install(project, local_params, remote_params, client_rsa_pub_key, opts={})
  version = remote_params.version

  # Find information about module and see if it exists
  local = local_params.create_local(project)
  local_branch = local.branch_name
  local_module_name = local.module_name
  local_namespace = local.module_namespace_name

  module_obj = module_exists?(project.id_handle(), local_module_name, local_namespace)
  if module_obj and module_obj.get_module_branch(local_branch)
    # do not raise exception if user wants to ignore component import
    return module_obj if opts[:ignore_component_error]
    message = "Conflicts with already installed module (#{local_params.pp_module_name()})"
    message += ". To ignore this conflict and use installed module please use -i switch (import-dtkn REMOTE-SERVICE-NAME -i)." if opts[:additional_message]
    raise ErrorUsage.new(message)
  end

  remote = remote_params.create_remote(project)

  remote_repo_handler = Repo::Remote.new(remote)
  remote_repo_info = remote_repo_handler.get_remote_module_info?(client_rsa_pub_key, :raise_error=>true)
  remote.set_repo_name!(remote_repo_info[:git_repo_name])

  # declared here so they are defined outside Transaction scope
  non_nil_if_parsing_error = module_and_branch_info = commit_sha = parsed = repo_with_branch = nil

  # outside of transaction only doing read/check operations
  Transaction do
    # case on whether the module is created already
    if module_obj
      # TODO: ModuleBranch::Location: since repo has remote_ref in it must get appopriate repo
      raise Error.new("TODO: ModuleBranch::Location; need to right this")
      repo_with_branch = module_obj.get_repo!()
    else
      # TODO: ModuleBranch::Location: see if this is necessary
      remote_repo_handler.authorize_dtk_instance(client_rsa_pub_key)

      # create empty repo on local repo manager;
      # need to make sure that tests above indicate whether module exists already since using :delete_if_exists
      create_opts = {
        :donot_create_master_branch => true,
        :delete_if_exists => true
      }
      repo_user_acls = RepoUser.authorized_users_acls(project.id_handle())
      repo_with_branch = Repo::WithBranch.create_workspace_repo(project.id_handle(),local,repo_user_acls,create_opts)
    end

    commit_sha = repo_with_branch.initial_sync_with_remote(remote,remote_repo_info)
    # create object in object model that corresponds to remote repo
    create_repo_remote_object(repo_with_branch,remote,remote_repo_info[:git_repo_name])
    module_and_branch_info = create_module_and_branch_obj?(project,repo_with_branch.id_handle(),local)

    module_obj ||= module_and_branch_info[:module_idh].create_object()
    module_branch = module_and_branch_info[:module_branch_idh].create_object()

    opts_process_dsl = {:do_not_raise => true}
    opts_process_dsl.merge!(:set_external_refs => true) if module_type == :component_module
    non_nil_if_parsing_error = module_obj.install__process_dsl(repo_with_branch,module_branch,local,opts_process_dsl)
    module_branch.set_sha(commit_sha)
  end
  opts_info = {:version=>version, :module_namespace=>local_namespace}
  response = module_repo_info(repo_with_branch,module_and_branch_info,opts_info)

  response[:dsl_parse_error] = non_nil_if_parsing_error if ErrorUsage::Parsing.is_error?(non_nil_if_parsing_error)
  response
end
-
-
1
# Deletes the module on the remote repo manager and unlinks any local repos
# that pointed at it. Returns nil.
def delete_remote(project, remote_params, client_rsa_pub_key, force_delete = false)
  remote = remote_params.create_remote(project)
  # delete module on remote repo manager
  Repo::Remote.new(remote).delete_remote_module(client_rsa_pub_key, force_delete)

  # unlink any local repos that were linked to this remote module
  local_module_name = remote.module_name
  local_namespace = remote.namespace # TODO: is this right?
  if module_obj = module_exists?(project.id_handle(),local_module_name, local_namespace)
    # TODO: ModuleBranch::Location: below looks broken
    module_obj.get_repos().uniq().each do |repo|
      # we remove remote repos
      repo_remote_db = RepoRemote.get_remote_repo(repo.model_handle(:repo_remote), repo.id, remote.module_name, remote.namespace)
      unless repo_remote_db
        raise ErrorUsage.new("Remote component/service (#{remote.pp_module_name()}) does not exist")
      end

      repo.unlink_remote(remote)

      RepoRemote.delete_repos([repo_remote_db.id_handle()])
    end
  end
  nil
end
-
-
1
# Lists remote module info from the repo manager for this module type.
# NOTE(review): model_handle is unused here; kept for caller compatibility.
def list_remotes(model_handle, rsa_pub_key = nil)
  remote_handler = Repo::Remote.new
  remote_handler.list_module_info(module_type(), rsa_pub_key)
end
-
-
1
# Records a repo_remote row tying +repo+ to the given remote / repo-manager name.
def create_repo_remote_object(repo,remote,remote_repo_name)
  RepoRemote.create_repo_remote(
    repo.model_handle(:repo_remote),
    remote.module_name,
    remote_repo_name,
    remote.namespace,
    repo.id,
    Opts.new(:set_as_default_if_first => true)
  )
end
-
end
-
-
1
module Remote::Instance
-
1
# Returns diffs between the local branch for +version+ and its default remote,
# as an array of git-diff strings — or a plain message string when the diffs
# cannot be JSON encoded.
def list_remote_diffs(version=nil)
  local_branch = get_module_branch_matching_version(version)
  default_remote_repo = RepoRemote.default_from_module_branch?(local_branch)
  unless default_remote_repo
    raise ErrorUsage.new("Module '#{module_name()}' is not linked to remote repo!")
  end

  remote_branch = default_remote_repo.remote_dtkn_location(get_project(),module_type(),module_name())
  ret = local_branch.get_repo().get_remote_diffs(local_branch,remote_branch).map do |diff_obj|
    header = "diff --git a/#{diff_obj.a_path} b/#{diff_obj.b_path}\n"
    header + "#{diff_obj.diff}\n"
  end
  # TODO: come up with better solution to JSON encoding problem of diffs
  begin
    ::JSON.generate(ret) # encodability probe only; result intentionally discarded
  rescue
    ret = "There are diffs between local module and remote one.\n"
  end
  ret
end
-
-
1
# Marker subclass of Hash used for remote-module info payloads.
class Info < Hash
end
-
-
1
# Info payload for a module tracked via a custom git remote (branch is
# always 'master'; no dependency warnings are computed here).
def get_custom_git_remote_module_info(default_remote)
  base = Info.new()
  base.merge(
    :module_name => self.module_name,
    :full_module_name => self.full_module_name,
    # TODO: will change this key to :remote_ref when upstream uses this
    :remote_repo => default_remote.remote_ref,
    :remote_repo_url => default_remote.git_remote_url(),
    :remote_branch => 'master',
    :dependency_warnings => []
  )
end
-
-
# raises an access rights usage error if user does not have access to the remote module
-
1
# raises an access rights usage error if user does not have access to the remote module
def get_linked_remote_module_info(project,action,remote_params,client_rsa_pub_key,access_rights,module_refs_content=nil)
  remote = remote_params.create_remote(project)

  repo_remote_handler = Repo::Remote.new(remote)
  remote_module_info = repo_remote_handler.get_remote_module_info?(
    client_rsa_pub_key,
    :raise_error => true,
    :module_refs_content => module_refs_content
  )

  # we also check if user has required permissions
  # TODO: [Haris] We ignore access rights and force them on calls, this will need ot be refactored since it is security risk
  # to allow permission to be sent from client
  if client_rsa_pub_key
    required_access =
      case action
      when 'push' then Repo::Remote::AuthMixin::ACCESS_WRITE
      when 'pull' then Repo::Remote::AuthMixin::ACCESS_READ
      end
    repo_remote_handler.authorize_dtk_instance(client_rsa_pub_key, required_access) if required_access
  end

  unless workspace_branch_obj = remote.get_linked_workspace_branch_obj?(self)
    raise_error_when_not_properly_linked(action,remote)
  end

  ret = Info.new().merge(
    :module_name => remote.module_name,
    :full_module_name => self.full_module_name,
    # TODO: will change this key to :remote_ref when upstream uses this
    :remote_repo => remote.remote_ref,
    :remote_repo_url => remote_module_info[:remote_repo_url],
    :remote_branch => remote.branch_name,
    :dependency_warnings => remote_module_info[:dependency_warnings]
  )

  if version = remote.version
    ret.merge!(:version => version)
  end

  ret
end
-
-
# publish to a remote repo
-
# request_params: hash map containing remote_component_name, remote_component_namespace
-
1
# publish to a remote repo
# request_params: hash map containing remote_component_name, remote_component_namespace
def publish(local_params,remote_params,client_rsa_pub_key)
  project = get_project()
  remote = remote_params.create_remote(project)
  local = local_params.create_local(project)

  module_branch_obj = self.class.get_module_branch_from_local(local)
  raise Error.new("Cannot find module_branch_obj from local") unless module_branch_obj

  publish_preprocess_raise_error?(module_branch_obj)

  # we need to send Repoman information about modules and we do it here
  workspace_branch = get_workspace_module_branch()
  file_content = repo_file_content(workspace_branch, ModuleRefs.meta_filename_path())

  # create module on remote repo manager
  # this wil raise error if it exists already or dont have accsss
  repoman_response = Repo::Remote.new(remote).publish_to_remote(client_rsa_pub_key, file_content)
  remote_repo_name = repoman_response[:git_repo_name]
  remote.set_repo_name!(remote_repo_name)

  # link and push to remote repo
  # create remote repo object
  repo = get_workspace_repo() #TODO: ModuleBranch::Location: need to update get_workspace_repo if can have multiple module branches
  repo.link_to_remote(local,remote)
  repo.push_to_remote(local,remote)

  self.class.create_repo_remote_object(repo,remote,remote_repo_name)
  repoman_response.merge(:remote_repo_name => remote[:module_name])
end
-
-
1
private
-
1
# Raises an ErrorUsage explaining that this module is not linked to the
# remote's namespace for the attempted +action+.
# Fix: the caller (get_linked_remote_module_info) dispatches on the strings
# 'push'/'pull' while this compared against the Symbol :push, so push failures
# were reported with the pull message; normalize before comparing.
def raise_error_when_not_properly_linked(action,remote)
  if action.to_s == 'push'
    raise ErrorUsage.new("Cannot push module (#{module_name()}) to remote namespace (#{remote.namespace}) because it is currently not linked to it")
  else # action == 'pull'/:pull
    raise ErrorUsage.new("Cannot pull module (#{module_name()}) from remote namespace (#{remote.namespace}) because it is currently not linked to it")
  end
end
-
end
-
-
end; end
-
-
1
module DTK
-
1
module AutoImport
-
1
# Returns [missing_modules, found_modules, dependency_warnings] for the remote
# module's component dependencies (see cross_reference_modules).
def get_required_and_missing_modules(project, remote_params, client_rsa_pub_key=nil)
  remote = remote_params.create_remote(project)
  response = Repo::Remote.new(remote).get_remote_module_components(client_rsa_pub_key)
  opts = Opts.new(:project_idh => project.id_handle())

  # this method will return array with missing and required modules
  # (fix: result was previously assigned to an unused local before being returned)
  self.cross_reference_modules(opts, response['component_info'], remote.namespace, response['dependency_warnings'])
end
-
-
# Method will check if given component modules are present on the system
-
# returns [missing_modules, found_modules]
-
1
# Method will check if given component modules are present on the system.
# required_modules     - array of hashes keyed by 'module_name', 'module_type',
#                        'version_info', 'module_url' and a namespace field
# service_namespace    - fallback namespace when a module carries none
# dependency_warnings  - mutated in place: 'not_found' warnings for modules that
#                        turn out to be installed are removed
# returns [missing_modules, found_modules, dependency_warnings]
def cross_reference_modules(opts, required_modules, service_namespace=nil, dependency_warnings=nil)
  project_idh = opts.required(:project_idh)

  required_modules ||= []
  # fix: removed dead computation of req_names (collected but never used)

  missing_modules, found_modules = [], []

  required_modules.each do |r_module|
    name = r_module["module_name"]
    type = r_module["module_type"]
    version = r_module["version_info"]
    url = r_module["module_url"]
    # we support both fields for namespace
    namespace = r_module["remote_namespace"]||r_module["module_namespace"]

    i_modules = installed_modules(type.to_sym, project_idh)

    is_found = i_modules.find do |i_module|
      name.eql?(i_module.display_name) and
        ModuleVersion.versions_same?(version, i_module.fetch(:module_branch,{})[:version]) and
        (namespace.nil? or namespace.eql?(i_module.module_namespace))
    end

    data = data_element(name, namespace||service_namespace, type, version, url)

    if is_found
      found_modules << data
    else
      missing_modules << data
    end
  end

  # delete modules that are already installed
  if dependency_warnings
    dependency_warnings.reject! do |el|
      reject_it = false
      if el['error_type'].eql?('not_found')
        installed = installed_modules(el['module_type'], project_idh)
        installed.each do |i_module|
          # does it match name and namespace
          installed_name = i_module.display_name
          installed_ns = i_module.module_namespace

          if (el['module_name'].eql?(installed_name) and el['module_namespace'].eql?(installed_ns))
            found_modules << data_element(installed_name, installed_ns, el['module_type'])
            reject_it = true
            break
          end
        end
      end

      reject_it
    end
  end

  # important: drop the memoized module list so later calls see fresh data
  clear_cached()

  [missing_modules, found_modules, dependency_warnings]
end
-
-
1
private
-
-
1
# Normalized hash describing one module dependency.
def data_element(name, namespace, type, version = nil, url = nil)
  {
    :name => name,
    :version => version,
    :type => type,
    :namespace => namespace,
    :module_url => url
  }
end
-
-
1
# Resets the memoized per-type module list (see installed_modules).
def clear_cached()
  @cached_module_list = Hash.new
end
-
-
1
# Memoized (per call-cycle, see clear_cached) fetch of the project's modules.
# NOTE(review): the cache is keyed by +type+ but the query filter only mentions
# the project id — the model handle created from +type+ presumably scopes the
# query; confirm against Model#get_objs.
def installed_modules(type, project_idh)
  @cached_module_list ||= {}
  type = type.to_sym

  @cached_module_list[type] ||= begin
    sp_hash = {
      :cols => [:id, :display_name, :namespace].compact,
      :filter => [:eq, :project_project_id, project_idh.get_id()]
    }
    get_objs(project_idh.createMH(type),sp_hash)
  end

  @cached_module_list[type]
end
-
-
end
-
end
-
-
2
module DTK; class BaseModule
-
1
module DeleteMixin
-
1
# Deletes this module together with its implementations and repos, refusing
# when any assembly template or component instance still references it.
# Returns {:module_name => ...}.
def delete_object()
  assembly_templates = get_associated_assembly_templates()
  unless assembly_templates.empty?
    assembly_names = assembly_templates.map{|a|a.display_name_print_form(:include_namespace=>true)}
    raise ErrorUsage.new("Cannot delete the component module because the assembly template(s) (#{assembly_names.join(',')}) reference it")
  end

  components = get_associated_component_instances()
  raise_error_component_refs(components) unless components.empty?

  delete_instances(get_implementations().map{|impl|impl.id_handle()})

  repos = get_repos()
  repos.each{|repo|RepoManager.delete_repo(repo)}
  delete_instances(repos.map{|repo|repo.id_handle()})

  delete_instance(id_handle())
  {:module_name => module_name()}
end
-
-
1
# Like delete_version, but a no-op (no error) when the version does not exist.
def delete_version?(version)
  delete_version(version, :no_error_if_does_not_exist => true)
end
-
1
# Deletes the module branch (and its implementation) backing +version+.
# Raises ErrorUsage when the version is missing, unless
# opts[:no_error_if_does_not_exist]. Returns {:module_name => ...}.
def delete_version(version,opts={})
  ret = {:module_name => module_name()}
  module_branch = get_module_branch_matching_version(version)
  unless module_branch
    return ret if opts[:no_error_if_does_not_exist]
    raise ErrorUsage.new("Version '#{version}' for specified component module does not exist")
  end

  if implementation = module_branch.get_implementation()
    delete_instance(implementation.id_handle())
  end
  module_branch.delete_instance_and_repo_branch()
  ret
end
-
-
-
1
private
-
1
# Raises an ErrorUsage listing every component reference that blocks deletion,
# including the owning service instance when one can be resolved.
# Fixes: misspelled local ('asssembly_ids') renamed; the final 'raise ... end'
# was fused onto a single line.
def raise_error_component_refs(components)
  ndx_assemblies = Hash.new
  assembly_ids = components.map{|r|r[:assembly_id]}.compact
  unless assembly_ids.empty?
    sp_hash = {
      :cols => [:id,:group_id,:display_name],
      :filter => [:oneof,:id,assembly_ids]
    }
    ndx_assemblies = Assembly::Instance.get_objs(model_handle(:assembly_instance),sp_hash).inject(Hash.new){|h,r|h.merge(r[:id] => r)}
  end
  refs = components.map do |r|
    cmp_ref = r.display_name_print_form(:node_prefix=>true,:namespace_prefix=>true)
    ref =
      if cmp_ref =~ /(^[^\/]+)\/([^\/]+$)/
        "Reference to '#{$2}' on node '#{$1}'"
      else
        "Reference to '#{cmp_ref}'"
      end
    if assembly = ndx_assemblies[r[:assembly_id]]
      ref << " in service instance '#{assembly.display_name_print_form()}'"
    end
    ref
  end
  raise ErrorUsage.new("Cannot delete the component module because the following:\n #{refs.join("\n ")}")
end
-
end
-
end; end
-
1
module DTK
-
1
class ModuleDSL
-
1
r8_nested_require('dsl','parsing_error')
-
1
r8_nested_require('dsl','update_model')
-
1
r8_nested_require('dsl','generate_from_impl')
-
1
r8_nested_require('dsl','object_model_form')
-
1
r8_nested_require('dsl','incremental_generator')
-
# TODO: this needs to be after object_model_form, because object_model_form loads errors; should move errors to parent and include first here
-
1
r8_nested_require('dsl','ref_integrity')
-
1
extend UpdateModelClassMixin
-
1
include UpdateModelMixin
-
-
1
attr_reader :input_hash,:project_idh,:module_branch
-
1
# opts may carry :ref_integrity_snapshot and :component_module (see parse_dsl);
# both are read back through guarded accessors further below.
def initialize(impl_idh,module_branch,version_specific_input_hash,opts={})
  @impl_idh = impl_idh
  @project_idh = impl_idh.get_parent_id_handle_with_auth_info()
  @module_branch = module_branch
  # normalization also raises any version-specific parse errors
  @input_hash = version_parse_check_and_normalize(version_specific_input_hash)
  @ref_integrity_snapshot = opts[:ref_integrity_snapshot]
  @component_module = opts[:component_module]
  # TODO: deprecate <config_agent_type>
  @config_agent_type = ConfigAgent::Type.default_symbol
end
-
1
private :initialize
-
-
1
# Parses the module DSL for +component_module+ from +impl_obj+, threading a
# referential-integrity snapshot through to the created dsl object.
def self.parse_dsl(component_module,impl_obj,opts={})
  snapshot = RefIntegrity.snapshot_associated_assembly_templates(component_module)
  create_dsl_object_from_impl(impl_obj, opts.merge(:ref_integrity_snapshot => snapshot, :component_module => component_module))
end
-
-
1
# get associated assembly templates before do any updates and use this to see if any referential integrity
# problems within transaction after do update; transaction is aborted if any errors found
def update_model_with_ref_integrity_check(opts={})
  Model.Transaction do
    update_opts = {
      :override_attrs => {"module_branch_id" => @module_branch.id()},
      :namespace => component_module().module_namespace()
    }
    update_opts[:version] = opts[:version] if opts[:version]
    update_model(update_opts)

    ref_integrity_snapshot.raise_error_if_any_violations()
    ref_integrity_snapshot.integrity_post_processing()
  end
end
-
-
# parses and creates dsl_object form file in implementation
-
# or content if passed in opts
-
1
# parses and creates dsl_object form file in implementation
# or content if passed in opts (opts[:dsl_created_info] with :path/:content)
def self.create_dsl_object_from_impl(impl_obj,opts={})
  dsl_created_info = opts[:dsl_created_info]
  if dsl_created_info
    dsl_filename = dsl_created_info[:path]
    content = dsl_created_info[:content]
  else
    info = get_dsl_file_raw_content_and_info(impl_obj)
    dsl_filename = info[:dsl_filename]
    content = info[:content]
  end
  create_from_file_obj_hash(impl_obj,dsl_filename,content,opts)
end
-
# parses and creates dsl_object form hash parsed in as target
-
1
# parses and creates dsl_object form hash parsed in as target;
# returns a ParsingError object instead of the dsl object on parse problems
def self.create_from_file_obj_hash(impl_obj,dsl_filename,content,opts={})
  unless isa_dsl_filename?(dsl_filename)
    raise Error.new("The file path (#{dsl_filename}) does not refer to a dsl file name")
  end
  parsed_name = parse_dsl_filename(dsl_filename)
  opts[:file_path] = dsl_filename
  input_hash = convert_to_hash(content,parsed_name[:format_type],opts)
  return input_hash if ParsingError.is_error?(input_hash)

  name_attribute_check = name_attribute_integrity_check(input_hash['components'])
  return name_attribute_check if ParsingError.is_error?(name_attribute_check)

  ParsingError.trap do
    new(impl_obj.id_handle(),impl_obj.get_module_branch(),input_hash,opts)
  end
end
-
-
# returns [dsl_file_path,hash_content,fragment_hash]
-
1
# returns [dsl_file_path,full_hash,fragment_hash]; full_hash is the module's
# DSL hash with the fragment generated from augmented_objects merged in
def self.incremental_generate(module_branch,augmented_objects,context={})
  aug_objects = augmented_objects.kind_of?(Array) ? augmented_objects : [augmented_objects]
  helper = IncrementalGeneratorHelper.new(aug_objects)
  info = get_dsl_file_hash_content_info(module_branch)
  full_hash = info[:hash_content]
  fragment_hash = helper.update_full_hash!(full_hash,aug_objects,context)
  [info[:dsl_filename],full_hash,fragment_hash]
end
-
-
1
# Returns the path of the first DSL file in +impl_obj+'s repo matching the
# given DSL integer version (default when nil) and optional format type, or nil.
def self.contains_dsl_file?(impl_obj,dsl_integer_version=nil,format_type=nil)
  dsl_integer_version ||= integer_version(dsl_integer_version)
  unless regexp = DSLFilenameRegexp[dsl_integer_version]
    raise Error.new("Do not treat Component DSL version: #{dsl_integer_version.to_s}")
  end
  # Fixes: (1) "\." in a double-quoted string is just "." — the dot must be
  # escaped or it matches any character; (2) ExtensionToType is keyed by
  # extension strings, so looking it up with a format-type symbol always gave
  # nil — TypeToExtension maps :yaml/:json to their extensions.
  format_ext_regexp = (format_type && Regexp.new("\\.(#{TypeToExtension[format_type.to_sym]}$)"))
  depth = 1
  RepoManager.ls_r(depth,{:file_only => true},impl_obj).find do |f|
    (f =~ regexp) and (format_ext_regexp.nil? or f =~ format_ext_regexp)
  end
end
-
-
1
# Default component-DSL integer version taken from server config.
def self.default_integer_version()
  component_config = R8::Config[:dsl][:component]
  component_config[:integer_version][:default].to_i
end
-
-
1
# Default DSL serialization format (:yaml/:json) taken from server config.
def self.default_format_type()
  component_config = R8::Config[:dsl][:component]
  component_config[:format_type][:default].to_sym
end
-
-
1
# Maps an integer DSL version (default when nil) to its dotted version string.
def self.version(integer_version=nil)
  VersionIntegerToVersion[integer_version || integer_version()]
end
-
-
1
# Validates that every 'puppet definition' component declares exactly one name
# attribute (see get_name_attributes); returns a
# ParsingError::BadPuppetDefinition on the first violation, otherwise no error.
def self.name_attribute_integrity_check(components)
  return unless components

  components.each do |cmp_name,cmp_value|
    # only components marked as 'puppet definition' are checked
    ext_ref = cmp_value['external_ref'].kind_of?(Hash) ? cmp_value['external_ref'] : {}
    next unless ext_ref.has_key?('puppet_definition')
    name_attrs = get_name_attributes(cmp_value['attributes'])
    unless name_attrs.size == 1
      return ParsingError::BadPuppetDefinition.new(:component => cmp_name, :invalid_names => name_attrs)
    end
  end
end
-
-
1
# Collects attribute names that act as the puppet 'name' attribute: either an
# attribute literally named 'name', or one whose external_ref points at
# puppet_attribute 'name'. Returns [] when +attributes+ is nil.
def self.get_name_attributes(attributes)
  return [] unless attributes

  attributes.inject([]) do |names,(attr_name,attr_def)|
    if attr_name.eql?('name')
      names << attr_name
    elsif ext_ref = attr_def['external_ref']
      names << attr_name if ext_ref.has_key?('puppet_attribute') && ext_ref['puppet_attribute'].eql?('name')
    end
    names
  end
end
-
# returns parsing_error if parsing error
-
-
# TODO: this might move to a more common area
-
1
# TODO: this might move to a more common area
def self.convert_attribute_mapping(input_am,base_cmp,dep_cmp,opts={})
  integer_version = 2 # TODO: fix this being hard coded
  adapter = load_and_return_version_adapter_class(integer_version)
  adapter.convert_attribute_mapping_helper(input_am,base_cmp,dep_cmp,opts)
end
-
-
1
private
-
1
# Guarded accessor: @ref_integrity_snapshot must have been supplied at construction.
def ref_integrity_snapshot()
  @ref_integrity_snapshot or raise Error.new("Unexpected that @ref_integrity_snapshot is nil")
end
-
-
1
# Guarded accessor: @component_module must have been supplied at construction.
def component_module()
  @component_module or raise Error.new("Unexpected that @component_module is nil")
end
-
-
1
# Renders augmented objects into DSL hash fragments and merges them into the
# full DSL hash (see ModuleDSL.incremental_generate).
class IncrementalGeneratorHelper < self
  def initialize(augmented_objects)
    @object_class = object_class(augmented_objects)

    integer_version = self.class.default_integer_version()
    base_klass = self.class.load_and_return_version_adapter_class(integer_version)
    @version_klass = base_klass.const_get('IncrementalGenerator')
  end

  # Merges the fragment generated from augmented_objects into full_hash
  # (mutating it) and returns the fragment.
  def update_full_hash!(full_hash,augmented_objects,context={})
    fragment = get_config_fragment_hash_form(augmented_objects)
    merge_fragment_into_full_hash!(full_hash,@object_class,fragment,context)
    fragment
  end

  def get_config_fragment_hash_form(augmented_objects)
    augmented_objects.inject(Hash.new) do |acc,aug_obj|
      acc.merge(@version_klass.generate(aug_obj))
    end
  end

  def merge_fragment_into_full_hash!(full_hash,object_class,fragment,context={})
    @version_klass.merge_fragment_into_full_hash!(full_hash,object_class,fragment,context)
    full_hash
  end

  # All augmented objects must share a single class; returns that class.
  def object_class(augmented_objects)
    klasses = augmented_objects.map{|obj|obj.class}.uniq
    unless klasses.size == 1
      klasses_print_form = klasses.map{|k|k.to_s}.join(',')
      raise Error.new("augmented_objects must have the same type rather than (#{klasses_print_form})")
    end
    klasses.first
  end
end
-
-
1
# Returns {:hash_content,:format_type,:dsl_filename} for the DSL file of the
# given Implementation or ModuleBranch.
def self.get_dsl_file_hash_content_info(impl_or_module_branch_obj,dsl_integer_version=nil,format_type=nil)
  impl_obj =
    case impl_or_module_branch_obj
    when Implementation then impl_or_module_branch_obj
    when ModuleBranch then impl_or_module_branch_obj.get_implementation()
    else raise Error.new("Unexpected object type for impl_or_module_branch_obj (#{impl_or_module_branch_obj.class})")
    end
  info = get_dsl_file_raw_content_and_info(impl_obj,dsl_integer_version,format_type)
  {:hash_content => convert_to_hash(info[:content],info[:format_type])}.merge(Aux::hash_subset(info,[:format_type,:dsl_filename]))
end
-
-
1
# Locates the DSL file in +impl_obj+'s repo and returns
# {:content,:format_type,:dsl_filename}; raises Error when absent.
def self.get_dsl_file_raw_content_and_info(impl_obj,dsl_integer_version=nil,format_type=nil)
  dsl_filename = contains_dsl_file?(impl_obj,dsl_integer_version,format_type)
  raise Error.new("Cannot find DSL file") unless dsl_filename
  parsed = parse_dsl_filename(dsl_filename,dsl_integer_version)
  {
    :content => RepoManager.get_file_content(dsl_filename,:implementation => impl_obj),
    :format_type => format_type || parsed[:format_type],
    :dsl_filename => dsl_filename
  }
end
-
-
1
# normalize also raises any parse errors
def version_parse_check_and_normalize(version_specific_input_hash)
  adapter = self.class.load_and_return_version_adapter_class(integer_version(version_specific_input_hash))
  adapter.normalize(version_specific_input_hash)
end
-
-
1
# Canonical DSL filename for +format_type+, e.g. 'dtk.model.yaml'.
# NOTE(review): dsl_integer_version is currently unused; kept for interface compatibility.
def self.dsl_filename(format_type,dsl_integer_version=nil)
  extension = TypeToExtension[format_type]
  unless extension
    legal_types = TypeToExtension.values.uniq.join(',')
    raise Error.new("Illegal dsl_filename extension (#{format_type}); legal types are: #{legal_types}")
  end
  "dtk.model.#{extension}"
end
-
-
1
# Integer DSL version declared by the meta file's "dsl_version" key
# (VersionIntegerWhenVersionMissing when absent); raises on unknown versions.
def integer_version(version_specific_input_hash)
  version = version_specific_input_hash["dsl_version"]
  result = version ? VersionToVersionInteger[version.to_s] : VersionIntegerWhenVersionMissing
  raise ErrorUsage.new("Illegal version (#{version}) found in meta file") unless result
  result
end
-
1
# Integer version assumed when the meta file carries no "dsl_version" key.
VersionIntegerWhenVersionMissing = 1
# Dotted DSL version string -> internal integer version.
VersionToVersionInteger = {
  "0.9" => 2,
  "0.9.1" => 3,
  "1.0.0" => 4
}
# Inverse map: internal integer version -> dotted version string.
VersionIntegerToVersion = VersionToVersionInteger.invert

# Per integer version: filename pattern of its DSL file (capture = extension).
DSLFilenameRegexp = {
  1 => /^r8meta\.[a-z]+\.([a-z]+$)/,
  2 => /^dtk\.model\.([a-z]+$)/,
  3 => /^dtk\.model\.([a-z]+$)/,
  4 => /^dtk\.model\.([a-z]+$)/,
}

VersionsTreated = DSLFilenameRegexp.keys
# File extension <-> format type symbol.
ExtensionToType = {
  "yaml" => :yaml,
  "json" => :json
}
TypeToExtension = ExtensionToType.invert
-
-
1
class << self
-
1
# Lazily loads and memoizes the version-specific DSL adapter class.
def load_and_return_version_adapter_class(integer_version)
  @cached_adapter_class ||= Hash.new
  @cached_adapter_class[integer_version] ||= begin
    adapter_name = "v#{integer_version.to_s}"
    load_opts = {
      :class_name => {:adapter_type => adapter_type()},
      :subclass_adapter_name => true
    }
    DynamicLoader.load_and_return_adapter_class(adapter_dir(),adapter_name,load_opts)
  end
end
-
-
1
# MatchData-index truthy when +filename+ looks like a DSL file for the version.
def isa_dsl_filename?(filename,dsl_integer_version=nil)
  pattern = DSLFilenameRegexp[integer_version(dsl_integer_version)]
  filename =~ pattern
end
-
-
1
private
-
1
# Class-name stem used by DynamicLoader when resolving version adapters.
def adapter_type()
  'ModuleDSL'
end
-
1
# Directory (relative) from which version adapters are loaded.
def adapter_dir()
  'dsl'
end
-
-
1
# Thin forwarder so class-level code can open a Model transaction.
def Transaction(*args, &blk)
  Model.Transaction(*args, &blk)
end
-
-
1
# +pos_val+ coerced to Integer when given, else the configured default version.
def integer_version(pos_val=nil)
  if pos_val
    pos_val.to_i
  else
    default_integer_version()
  end
end
-
-
# returns hash with keys: :format_type
-
1
# returns hash with keys: :format_type
# Raises Error when the filename does not match the DSL pattern for the
# version, or carries an unknown extension.
def parse_dsl_filename(filename,dsl_integer_version=nil)
  if filename =~ DSLFilenameRegexp[integer_version(dsl_integer_version)]
    file_extension = $1
    unless format_type = ExtensionToType[file_extension]
      raise Error.new("illegal file extension #{file_extension}")
    end
    {:format_type => format_type}
  else
    # Fix: the message previously contained a mangled placeholder ("#(unknown)")
    # instead of interpolating the offending filename.
    raise Error.new("Component filename (#{filename}) has illegal form")
  end
end
-
-
# TODO: deprecate <config_agent_type>
-
1
# TODO: deprecate <config_agent_type>
# Maps the meta file's "module_type" to a config-agent symbol; passes parsing
# errors straight through and falls back to the default when the key is absent.
def ret_config_agent_type(input_hash)
  return input_hash if ParsingError.is_error?(input_hash)
  type = input_hash["module_type"]
  return ConfigAgent::Type.default_symbol() unless type
  case type
  when "puppet_module" then ConfigAgent::Type::Symbol.puppet
  # Part of code to handle new serverspec type of module
  when "serverspec" then ConfigAgent::Type::Symbol.serverspec
  when "test" then ConfigAgent::Type::Symbol.test
  when "node_module" then ConfigAgent::Type::Symbol.node_module
  else
    ParsingError.new("Unexpected module_type (#{type})")
  end
end
-
-
1
# Wraps Aux.convert_to_hash, translating parse failures into a usage error.
def convert_to_hash(content,format_type,opts={})
  Aux.convert_to_hash(content,format_type,opts)
rescue ArgumentError => e
  raise ErrorUsage.new("Error parsing the component dsl file; #{e.to_s}")
end
-
-
end
-
end
-
end
-
2
module DTK; class ModuleDSL
  # Generates DSL (r8meta) artifacts from an implementation's parsed content.
  class GenerateFromImpl
    r8_nested_require("generate_from_impl","dsl_object")

    # Factory that validates the requested DSL integer version before
    # instantiating; defaults to ModuleDSL.default_integer_version().
    def self.create(integer_version=nil)
      integer_version ||= ModuleDSL.default_integer_version()
      unless SupportedIntegerVersions.include?(integer_version)
        raise Error.new("Unexpected integer version (#{integer_version})")
      end
      new(integer_version)
    end
    SupportedIntegerVersions = [1,2,3,4]

    def initialize(integer_version)
      @integer_version = integer_version
    end
    private :initialize

    # Builds the module-level refinement hash (a DSLStructObject), threading
    # version/module/agent/implementation context through to the nested
    # DSLObject factories.
    def generate_refinement_hash(parse_struct,module_name,impl_idh)
      context = {
        :integer_version => @integer_version,
        :module_name => module_name,
        :config_agent_type => parse_struct.config_agent_type,
        :implementation_id => impl_idh.get_id()
      }
      DSLObject.new(context).create(:module,parse_struct)
    end

    # Renders meta_info_hash as an r8meta YAML file under the implementation's
    # repo dir, registers file assets, adds components from the r8meta hash,
    # and pushes contained files to the repo.
    # NOTE: currently short-circuits with a deliberate raise (see TODO below)
    # before doing any work.
    def self.save_dsl_info(meta_info_hash,impl_mh)
      # TODO: check
      raise Error.new("Need to check if meta_info_hash['version'] is right call")
      integer_version = meta_info_hash["version"]
      config_agent_type = meta_info_hash["config_agent_type"]
      module_name = meta_info_hash["module_name"]
      components = meta_info_hash["components"]
      impl_id = meta_info_hash["implementation_id"]
      module_hash = {
        :required => true,
        :type => "module",
        :def => {"components" => components}
      }
      impl_obj = impl_mh.createIDH(:id => impl_id).create_object().update_object!(:id,:display_name,:type,:repo_id,:repo,:library_library_id)
      impl_idh = impl_obj.id_handle
      library_idh = impl_idh.createIDH(:model_name => :library,:id => impl_obj[:library_library_id])
      repo_obj = Model.get_obj(impl_idh.createMH(:repo),{:cols => [:id,:local_dir], :filter => [:eq, :id, impl_obj[:repo_id]]})

      dsl_generator = create(integer_version)
      object_form = dsl_generator.reify(module_hash,module_name,config_agent_type)
      r8meta_hash = object_form.render_hash_form()

      r8meta_path = "#{repo_obj[:local_dir]}/r8meta.#{config_agent_type}.yml"
      r8meta_hash.write_yaml(STDOUT)
      File.open(r8meta_path,"w"){|f|r8meta_hash.write_yaml(f)}

      # this will add any file_assets that have not been yet added (this will include the r8meta file)
      impl_obj.create_file_assets_from_dir_els()

      add_components_from_r8meta(library_idh,config_agent_type,impl_idh,r8meta_hash)

      impl_obj.add_contained_files_and_push_to_repo()
    end

    # Converts a stored hash back into object (DSLStructObject) form.
    def reify(hash,module_name,config_agent_type)
      context = {
        :integer_version => @integer_version,
        # TODO: do we need module_name and :config_agent_type for reify?
        :module_name => module_name,
        :config_agent_type => config_agent_type
      }
      DSLObject.new(context).reify(hash)
    end
  end
end; end
-
2
module DTK; class ModuleDSL
-
1
class GenerateFromImpl
-
1
# Helpers shared by the generated DSL objects: hash-key bookkeeping,
# external-ref construction, attribute-name sanitizing, and small
# wrappers (t/unknown/nailed) kept as extension points for a possible
# DSLTerm-based representation later.
module CommonMixin
  # Records +key+ under :id and returns it.
  def set_hash_key(key)
    self[:id] = key
    key
  end

  # The key previously recorded by set_hash_key (stored under :id).
  def hash_key()
    self[:id]
  end

  # Ordered {name, type} pair describing an external reference.
  def create_external_ref(name,type)
    DSLObject::RenderHash.new([{"name" => name},{"type" => type}])
  end

  # Replaces every character outside [a-zA-Z0-9_-] with '-'.
  def sanitize_attribute(attr)
    attr.gsub(/[^a-zA-Z0-9_-]/,"-")
  end

  # Identity today.
  # TODO probably remove return nil if term.nil?
  # DSLTerm.new(term)
  def t(term)
    term
  end

  # Placeholder for an 'unknown' term; currently just nil.
  # TODO: probably remove DSLTerm.create_unknown
  def unknown
    nil
  end

  # Identity today. TODO: may also make this a DSLTerm obj
  def nailed(term)
    term
  end
end
-
-
1
r8_nested_require('dsl_object','store_config_handler')
-
1
include StoreConfigHandlerMixin
-
-
1
# Envelope pairing a DSL object type with its content under the ordered
# keys :type / :required / :def (SimpleOrderedHash is declared elsewhere).
class DSLStructObject < SimpleOrderedHash
  # +content+ is the type-specific DSLObject. In reify mode :required is
  # supplied explicitly via opts[:required]; otherwise it is pulled
  # (destructively — delete removes the key) out of content[:include].
  def initialize(type,content,opts={})
    if opts[:reify]
      super([{:type => type.to_s},{:required => opts[:required]},{:def => content}])
    else
      super([{:type => type.to_s},{:required => nil},{:def => content}])
      self[:required] = content.delete(:include)
    end
  end

  # Delegates to the wrapped content's hash key.
  def hash_key()
    self[:def].hash_key()
  end

  # Renders only the wrapped content; the :type/:required envelope itself
  # is not rendered here.
  def render_hash_form(opts={})
    self[:def].render_hash_form(opts)
  end
end
-
-
1
# Array of DSLStructObject elements; iteration helpers unwrap each
# element's :def before yielding.
class DSLArray < Array
  # Yields each element's :def.
  # NOTE(review): the guard below can never skip an element — +el+ is a
  # hash-like object and thus always truthy, so 'not el' is always false.
  # It looks like 'not el[:required]' may have been intended for the
  # :skip_required_is_false option; confirm against callers before changing.
  def each_element(opts={},&block)
    each do |el|
      unless opts[:skip_required_is_false] and (not el.nil?) and not el
        block.call(el[:def])
      end
    end
  end

  # map counterpart of each_element; the same (suspect) skip guard applies.
  def map_element(opts={},&block)
    ret = Array.new
    each do |el|
      unless opts[:skip_required_is_false] and (not el.nil?) and not el
        ret << block.call(el[:def])
      end
    end
    ret
  end

  # Class-preserving concatenation (Array#+ would return a plain Array).
  def +(a2)
    ret = self.class.new
    each{|x|ret << x}
    a2.each{|x|ret << x}
    ret
  end
end
-
-
1
class DSLObject < SimpleOrderedHash
-
1
r8_nested_require('dsl_object','object_classes')
-
1
include CommonMixin
-
1
def initialize(context,opts={})
-
super()
-
@context = context
-
create_in_object_form(opts[:def]) if opts[:reify]
-
end
-
# TODO: make as part of context
-
1
ScaffoldingStrategy = {
-
:no_dynamic_attributes => true,
-
:no_defaults => true,
-
:ignore_components => ['params']
-
}
-
-
1
def create(type,parse_struct,opts={})
-
DSLStructObject.new(type,klass(type).new(parse_struct,@context.merge(opts)))
-
end
-
-
# dup used because yaml generation is upstream and dont want string refs
-
1
def required_value(key)
-
unless has_key?(key)
-
raise Error.new("meta object does not have key #{key}")
-
end
-
-
value_term = self[key]
-
raise Error.new("meta object with key #{key} is null") if value_term.nil?
-
return value_term.dup unless value_term.kind_of?(DSLTerm)
-
-
unless value_term.is_known?()
-
raise Error.new("meta object with key #{key} has unknown value")
-
end
-
value_term.value.dup
-
end
-
-
1
def value(key)
-
value_term = self[key]
-
return nil if value_term.nil?
-
return value_term unless value_term.kind_of?(DSLTerm)
-
value_term.is_known?() ? value_term.value : nil
-
end
-
-
1
def set_source_ref(parse_struct)
-
@context[:source_ref] = parse_struct
-
end
-
-
# functions to convert to object form
-
1
def reify(hash)
-
type = index(hash,:type)
-
content = klass(type).new(nil,@context,{:reify => true, :def => index(hash,:def)})
-
DSLStructObject.new(type,content,{:reify => true, :required => index(hash,:required)})
-
end
-
-
1
private
-
# functions to treat object functions
-
# can be overwritten
-
1
def object_attributes()
-
[]
-
end
-
1
def index(hash,key)
-
if hash.has_key?(key.to_s)
-
hash[key.to_s]
-
elsif hash.has_key?(key.to_sym)
-
hash[key.to_sym]
-
end
-
end
-
-
1
def has_index?(hash,key)
-
hash.has_key?(key.to_s) or hash.has_key?(key.to_sym)
-
end
-
-
1
def create_in_object_form(hash)
-
hash.each{|k,v|self[k.to_sym] = convert_value_if_needed(k,v)}
-
end
-
-
1
def convert_value_if_needed(key,val)
-
return val unless object_attributes().include?(key.to_sym)
-
-
if val.kind_of?(Array)
-
ret = DSLArray.new
-
val.each{|el_val| ret << reify(el_val) if selected?(el_val)}
-
ret
-
else
-
# TODO: no check for selcted here?
-
reify(val)
-
end
-
end
-
-
1
def selected?(hash)
-
index(hash,:selected) or not has_index?(hash,:selected) #default is 'selected'
-
end
-
-
###utilities
-
1
def is_foreign_component_name?(name)
-
if name =~ /(^.+)::.+$/
-
prefix = $1
-
prefix == module_name ? nil : true
-
end
-
end
-
-
1
def klass(type)
-
version_class = ModuleDSL.load_and_return_version_adapter_class(integer_version())
-
cap_type = type.to_s.split("_").map{|t|t.capitalize}.join("")
-
version_class.const_get("DSLObject").const_get(cap_type)
-
end
-
-
# context
-
1
def integer_version()
-
(@context||{})[:integer_version]
-
end
-
1
def module_name()
-
(@context||{})[:module_name]
-
end
-
1
def config_agent_type()
-
(@context||{})[:config_agent_type]
-
end
-
1
public
-
1
def parent()
-
(@context||{})[:parent]
-
end
-
1
def parent_source()
-
(@context||{})[:parent_source]
-
end
-
1
def source_ref()
-
(@context||{})[:source_ref]
-
end
-
-
# TODO: may deprecate
# Intermediate representation for values that may still be unknown and
# awaiting user input; backed by SimpleHashObject storage under the keys
# :value and :state.
class DSLTerm < SimpleHashObject
  def initialize(value,state=:known)
    # :value is only stored for known terms; unknown terms carry state only
    if state == :known
      self[:value] = value
    end
    self[:state] = state
  end

  # Convenience constructor for a term whose value is not yet determined.
  def self.create_unknown()
    new(nil,:unknown)
  end

  # Transitions the term to :known with the supplied value.
  def set_value(v)
    self[:state] = :known
    self[:value] = v
  end

  def value()
    self[:value]
  end

  def is_known?()
    self[:state] == :known
  end
end
-
-
1
# Accumulates {:input_var, :output_var} pairings between imported and
# exported storeconfig variables.
class VarMatches < Array
  # Appends one pairing; returns self for chaining.
  def add(input_var,output_var)
    push(:input_var => input_var,:output_var => output_var)
    self
  end

  # Class-preserving concatenation (Array#+ would downgrade to Array).
  def +(a2)
    combined = self.class.new
    each{|pair|combined << pair}
    a2.each{|pair|combined << pair}
    combined
  end
end
-
-
1
# Ordered hash used as the rendered (serializable) form of DSL objects.
class RenderHash < SimpleOrderedHash
  # Serializes to the given format (defaulting to the module's default);
  # YAML goes through yaml_form() first to strip shared references.
  def serialize(format_type=nil)
    format_type ||= ModuleDSL.default_format_type()
    if format_type == :yaml
      Aux.serialize(yaml_form(),format_type)
    else
      Aux.serialize(self,format_type)
    end
  end

  # TODO: deprecate
  # Dumps the YAML form to +io+ and appends a trailing newline.
  def write_yaml(io)
    require 'yaml'
    YAML::dump(yaml_form(),io)
    io << "\n"
  end

  # since yaml generator is being used want to remove references so dont generate yaml with labels
  # Builds a deep copy where nested RenderHash values are recursively
  # converted and other values are duplicated via dup? (presumably a
  # project extension that nil-safely dups — TODO confirm). The top-level
  # "version" key is deliberately skipped.
  def yaml_form(level=1)
    ret = RenderHash.new
    each do |k,v|
      if level == 1 and k == "version"
        next
      end
      converted_val =
        if v.kind_of?(RenderHash)
          v.yaml_form(level+1)
        elsif v.kind_of?(Array)
          v.map{|el|el.kind_of?(RenderHash) ? el.yaml_form(level+1) : el.dup?}
        else
          v.dup?
        end
      ret[k.dup?] = converted_val
    end
    ret
  end
end
-
end
-
end
-
end; end
-
-
# TODO: better sepearte parts that are where creating refinement has and parts for rendering in dtk dsl vrsion specfic form
-
2
module DTK; class ModuleDSL
-
1
class GenerateFromImpl
-
1
class DSLObject
-
1
class Module < self
-
1
def initialize(top_parse_struct,context,opts={})
-
super(context,opts)
-
return if opts[:reify]
-
context.each{|k,v|self[k] = v}
-
self[:components] = DSLArray.new
-
top_parse_struct.each_component do |component_ps|
-
self[:components] << create(:component,component_ps)
-
end
-
process_imported_resources()
-
end
-
-
1
def render_hash_form(opts={})
-
ret = RenderHash.new
-
ret.set_unless_nil("module",module_name?())
-
ret.set_unless_nil("dsl_version",ModuleDSL.version(integer_version()))
-
self[:components].each_element(:skip_required_is_false => true) do |cmp|
-
hash_key = render_cmp_ref(cmp.hash_key)
-
unless (ScaffoldingStrategy[:ignore_components]||[]).include?(hash_key)
-
add_component!(ret,hash_key,cmp.render_hash_form(opts))
-
end
-
end
-
set_include_modules!(ret,opts)
-
ret
-
end
-
-
1
def render_to_file(file,format)
-
end
-
-
1
private
-
1
def object_attributes()
-
[:components]
-
end
-
-
1
def set_include_modules!(ret,opts={})
-
end
-
-
1
def process_imported_resources()
-
# first get all the exported resources and imported resources
-
# TODO: more efficient to store these in first phase
-
attr_exp_rscs = Array.new
-
attr_imp_colls = Array.new
-
self[:components].each_element do |cmp|
-
cmp[:attributes].each_element do |attr|
-
parse_struct = attr.source_ref
-
if parse_struct
-
if parse_struct.is_exported_resource?()
-
attr_exp_rscs << attr
-
elsif parse_struct.is_imported_collection?()
-
attr_imp_colls << attr
-
end
-
end
-
end
-
end
-
return if attr_exp_rscs.empty? or attr_imp_colls.empty?
-
# get all teh matches
-
matches = Array.new
-
matches = attr_imp_colls.map do |attr_imp_coll|
-
if match = matching_storeconfig_vars?(attr_imp_coll,attr_exp_rscs)
-
{
-
:type => :imported_collection,
-
:attr_imp_coll => attr_imp_coll,
-
:attr_exp_rsc => match[:attr_exp_rsc],
-
:matching_vars => match[:vars]
-
}
-
end
-
end.compact
-
return if matches.empty?
-
set_user_friendly_names_for_storeconfig_vars!(matches)
-
-
# create link defs
-
matches.each do |match|
-
if link_def = create(:link_def,match)
-
(match[:attr_imp_coll].parent[:link_defs] ||= DSLArray.new) << link_def
-
end
-
end
-
end
-
1
def matching_storeconfig_vars?(attr_imp_coll,attr_exp_rscs)
-
attr_exp_rscs.each do |attr_exp_rsc|
-
if matching_vars = attr_imp_coll.source_ref.match_exported?(attr_exp_rsc.source_ref)
-
return {:vars => matching_vars, :attr_exp_rsc => attr_exp_rsc}
-
end
-
end
-
nil
-
end
-
-
1
def set_user_friendly_names_for_storeconfig_vars!(matches)
-
translated_ids = Array.new
-
matches.each do |match|
-
imp_attr = match[:attr_imp_coll]
-
exp_attr = match[:attr_exp_rsc]
-
unless translated_ids.include?(imp_attr[:id])
-
translated_ids << imp_attr[:id]
-
imp_attr.reset_hash_key_and_name_fields("import_from_#{exp_attr.parent[:id]}")
-
end
-
unless translated_ids.include?(exp_attr[:id])
-
translated_ids << exp_attr[:id]
-
exp_attr.reset_hash_key_and_name_fields("export_to_#{imp_attr.parent[:id]}")
-
end
-
end
-
end
-
-
# for render_hash
-
1
def module_name?()
-
nil
-
end
-
-
1
def module_type?()
-
nil
-
end
-
-
1
def render_cmp_ref(hash_key)
-
hash_key
-
end
-
end
-
-
1
class Component < self
-
1
def initialize(component_ps,context,opts={})
-
super(context,opts)
-
return if opts[:reify]
-
processed_name = component_ps[:name]
-
# if qualified name make sure matches module name
-
if processed_name =~ /(^[^:]+)::(.+$)/
-
prefix = $1
-
unqual_name = $2
-
unless prefix == module_name
-
raise ErrorUsage::Parsing.new("Component (#{processed_name}) has a module name not equal to the base module name (#{module_name})")
-
end
-
processed_name = "#{module_name}__#{unqual_name}"
-
end
-
-
set_hash_key(processed_name)
-
self[:display_name] = t(processed_name) #TODO: might instead put in label
-
self[:description] = unknown
-
self[:ui_png] = unknown
-
type = "#{component_ps.config_agent_type}_#{component_ps[:type]}"
-
external_ref = create_external_ref(component_ps[:name],type)
-
self[:external_ref] = nailed(external_ref)
-
self[:basic_type] = t("service") #TODO: stub
-
self[:component_type] = t(processed_name)
-
dependencies = dependencies(component_ps)
-
if component_ps.has_key?(:only_one_per_node)
-
self[:only_one_per_node] = t(component_ps[:only_one_per_node])
-
end
-
self[:dependencies] = dependencies unless dependencies.empty?
-
set_attributes(component_ps)
-
end
-
-
1
private
-
1
def object_attributes()
-
[:attributes,:dependencies,:link_defs]
-
end
-
1
def dependencies(component_ps)
-
ret = DSLArray.new
-
ret += find_foreign_resource_names(component_ps).map do |name|
-
create(:dependency,{:type => :foreign_dependency, :name => name})
-
end
-
# TODO: may be more dependency types
-
ret
-
end
-
-
1
def set_attributes(component_ps)
-
attr_num = 0
-
self[:attributes] = DSLArray.new
-
(component_ps[:attributes]||[]).each{|attr_ps|add_attribute(attr_ps,component_ps,attr_num+=1)}
-
-
(component_ps[:children]||[]).each do |child_ps|
-
if child_ps.is_imported_collection?()
-
add_attribute(child_ps,component_ps,attr_num+=1)
-
elsif child_ps.is_exported_resource?()
-
add_attribute(child_ps,component_ps,attr_num+=1)
-
end
-
end
-
end
-
-
1
def add_attribute(parse_structure,component_ps,attr_num)
-
opts = {:attr_num => attr_num, :parent => self, :parent_source => component_ps}
-
self[:attributes] << create(:attribute,parse_structure,opts)
-
end
-
-
1
def find_foreign_resource_names(component_ps)
-
ret = Array.new
-
(component_ps[:children]||[]).each do |child|
-
next unless child.is_defined_resource?()
-
name = child[:name]
-
next unless is_foreign_component_name?(name)
-
ret << name unless ret.include?(name)
-
end
-
ret
-
end
-
-
# for render_hash
-
1
def display_name?()
-
end
-
1
def label?()
-
end
-
1
def basic_type?()
-
end
-
1
def type?()
-
end
-
1
def component_type?()
-
end
-
1
def only_one_per_node?()
-
end
-
-
1
def converted_dependencies(opts)
-
nil #TODO: stub
-
end
-
-
1
def converted_link_defs(opts)
-
return nil unless lds = self[:link_defs]
-
lds.map_element(:skip_required_is_false => true){|ld|ld.render_hash_form(opts)}
-
end
-
-
1
def converted_attributes(opts)
-
attrs = self[:attributes]
-
return nil if attrs.nil? or attrs.empty?
-
ret = RenderHash.new
-
attrs.each_element(:skip_required_is_false => true) do |attr|
-
hash_key = attr.hash_key
-
ret[hash_key] = attr.render_hash_form(opts)
-
end
-
ret
-
end
-
end
-
-
1
# Dependency entry on a component; currently only :foreign_dependency
# (a reference to a component defined in another module) is supported.
class Dependency < self
  def initialize(data,context,opts={})
    super(context,opts)
    # in reify mode the superclass rebuilds state from opts[:def]
    return if opts[:reify]
    self[:type] = nailed(data[:type].to_s)
    case data[:type]
      when :foreign_dependency
        self[:name] = data[:name]
      else raise Error.new("Unexpected dependency type (#{data[:type]})")
    end
  end
end
-
-
# TODO: see if makes sense to handle the exported resource link defs heere while using store confif helper file to deal with attributes
-
1
module LinkDefDSLMixin
-
1
def initialize(data,context,opts={})
-
super(context,opts)
-
return if opts[:reify]
-
case data[:type]
-
when :imported_collection then initialize__imported_collection(data)
-
else raise Error.new("unexpeced link def type (#{data[:type]})")
-
end
-
end
-
end
-
-
1
class LinkDef < self
-
1
include LinkDefDSLMixin
-
1
private
-
1
def object_attributes()
-
[:possible_links]
-
end
-
-
1
def initialize__imported_collection(data)
-
self[:include] = true
-
self[:required] = nailed(true)
-
self[:type] = t(data[:attr_exp_rsc].parent.hash_key)
-
self[:possible_links] = (DSLArray.new << create(:link_def_possible_link,data))
-
end
-
end
-
-
1
class LinkDefPossibleLink < self
-
1
include LinkDefDSLMixin
-
1
def initialize__imported_collection(data)
-
self[:include] = true
-
output_component = data[:attr_exp_rsc].parent.hash_key
-
set_hash_key(output_component)
-
self[:type] = nailed("external")
-
StoreConfigHandler.add_attribute_mappings!(self,data)
-
end
-
1
def create_attribute_mapping(input,output,opts={})
-
data = {:input => input, :output => output}
-
data.merge!(:include => true) if opts[:include]
-
create(:link_def_attribute_mapping,data)
-
end
-
1
private
-
1
def object_attributes()
-
[:attribute_mappings]
-
end
-
end
-
-
1
# One input->output attribute mapping within a link-def possible link.
class LinkDefAttributeMapping < self
  def initialize(data,context,opts={})
    super(context,opts)
    return if opts[:reify]
    # :include is only set when the mapping should be selected by default
    self[:include] = true if data[:include]
    self[:output] = data[:output]
    self[:input] = data[:input]
  end
end
-
-
-
1
class Attribute < self
-
1
def initialize(parse_struct,context,opts={})
-
super(context,opts)
-
return if opts[:reify]
-
set_source_ref(parse_struct)
-
if parse_struct.is_attribute?()
-
initialize__from_attribute(parse_struct)
-
elsif parse_struct.is_exported_resource?()
-
initialize__from_exported_resource(parse_struct)
-
elsif parse_struct.is_imported_collection?()
-
initialize__from_imported_collection(parse_struct)
-
else
-
raise Error.new("Unexpected parse structure type (#{parse_struct.class.to_s})")
-
end
-
end
-
-
1
def attr_num()
-
(@context||[])[:attr_num]
-
end
-
-
1
def reset_hash_key_and_name_fields(new_key_x)
-
new_key = set_hash_key(new_key_x)
-
set_field_name(new_key)
-
set_label(new_key)
-
set_external_ref_name(new_key)
-
end
-
-
1
def set_hash_key(key_x)
-
key = key_x
-
num = 1
-
existing_keys = existing_hash_keys()
-
while existing_hash_keys().include?(key)
-
key = "#{key_x}#{(num+=1).to_s}"
-
end
-
super(key)
-
end
-
-
1
private
-
1
def initialize__from_attribute(attr_ps)
-
name = sanitize_attribute(attr_ps[:name])
-
set_hash_key(name)
-
set_field_name(name)
-
set_label(name)
-
self[:label] = t(name)
-
self[:description] = unknown
-
self[:type] = t("string") #default that can be overriten
-
var_default = nil
-
if default = attr_ps[:default]
-
if default.set_default_value?()
-
self[:type] = t(default.data_type)
-
var_default = default.contains_variable?()
-
self[:default_info] = var_default ? unknown : t(default.default_value())
-
end
-
end
-
if var_default
-
self[:required] = t(false)
-
else
-
self[:required] = (attr_ps.has_key?(:required) ? nailed(attr_ps[:required]) : unknown)
-
self[:include] = true if attr_ps[:required]
-
end
-
-
type = "#{config_agent_type}_attribute"
-
ext_ref = create_external_ref(attr_ps[:name],type)
-
ext_ref.merge!("default_variable" => default.to_s) if var_default
-
self[:external_ref] = nailed(ext_ref)
-
end
-
-
1
def set_field_name(name)
-
self[:field_name] = t(name)
-
end
-
-
1
def set_label(label)
-
self[:label] = t(label)
-
end
-
-
1
def set_external_ref_name(name)
-
self[:external_ref] && self[:external_ref]["name"] = name
-
end
-
-
1
def initialize__from_exported_resource(exp_rsc_ps)
-
StoreConfigHandler.set_output_attribute!(self,exp_rsc_ps)
-
end
-
1
def initialize__from_imported_collection(imp_coll_ps)
-
StoreConfigHandler.set_intput_attribute!(self,imp_coll_ps)
-
end
-
-
1
def existing_hash_keys()
-
((parent||{})[:attributes]||[]).map{|a|a.hash_key}.compact
-
end
-
-
# render hash methods
-
1
def display_name?()
-
end
-
end
-
end
-
end
-
end; end
-
3
module DTK; class ModuleDSL; class GenerateFromImpl
-
1
module StoreConfigHandlerMixin
-
1
class StoreConfigHandler
-
1
extend CommonMixin
-
1
def self.set_output_attribute!(attribute_meta,exp_rsc_ps)
-
klass = ret_klass(exp_rsc_ps[:name])
-
klass.process_output_attr!(attribute_meta,exp_rsc_ps)
-
end
-
1
def self.set_intput_attribute!(attribute_meta,imp_coll_ps)
-
klass = ret_klass(imp_coll_ps[:type])
-
klass.process_input_attr!(attribute_meta,imp_coll_ps)
-
end
-
-
1
def self.add_attribute_mappings!(link_def_poss_link,data)
-
resource_type = data[:attr_imp_coll].source_ref[:type]
-
klass = ret_klass(resource_type)
-
klass.process_storeconfig_attr_mapping!(link_def_poss_link,data)
-
klass.process_extra_attr_mappings!(link_def_poss_link,data)
-
end
-
1
private
-
1
def self.ret_klass(type)
-
ret = nil
-
begin
-
ret = XYZ::StoreConfigHandlerMixin.const_get "#{type.capitalize}ERH"
-
rescue
-
raise Error.new("processor for builtin type (#{type}) not treated yet")
-
end
-
ret
-
end
-
-
1
def self.process_output_attr!(attr_meta,exp_rsc_ps)
-
hash_key = hash_key_for_output_attr(exp_rsc_ps)
-
# resassign hash_key because attr_meta.set_hash_key can renumber for dups
-
hash_key = attr_meta.set_hash_key(hash_key)
-
name = hash_key
-
attr_meta[:include] = nailed(true)
-
attr_meta[:field_name] = t(name)
-
attr_meta[:description] = unknown
-
attr_meta[:type] = t("string") #TODO: stub
-
attr_meta[:dynamic] = nailed(true)
-
ext_ref = create_external_ref(name,"puppet_exported_resource")
-
augment_ext_ref_for_output_attr!(ext_ref,exp_rsc_ps)
-
attr_meta[:external_ref] = nailed(ext_ref)
-
end
-
-
1
def self.process_input_attr!(attr_meta,imp_coll_ps)
-
hash_key = hash_key_for_input_attr(imp_coll_ps)
-
# resassign hash_key because attr_meta.set_hash_key can renumber for dups
-
hash_key = attr_meta.set_hash_key(hash_key)
-
name = hash_key
-
attr_meta[:include] = nailed(true)
-
attr_meta[:field_name] = t(name)
-
attr_meta[:description] = unknown
-
attr_meta[:type] = t("string") #TODO: stub
-
ext_ref = create_external_ref(name,"puppet_imported_collection")
-
augment_ext_ref_for_input_attr!(ext_ref,imp_coll_ps)
-
attr_meta[:external_ref] = nailed(ext_ref)
-
end
-
-
1
def self.process_storeconfig_attr_mapping!(link_def_poss_link,data)
-
attr_mappings = link_def_poss_link[:attribute_mappings] ||= MetaArray.new
-
input = {:component => data[:attr_imp_coll].parent.hash_key, :attribute => data[:attr_imp_coll].hash_key}
-
output = {:component => data[:attr_exp_rsc].parent.hash_key, :attribute => data[:attr_exp_rsc].hash_key}
-
attr_mappings << link_def_poss_link.create_attribute_mapping(input,output,{:include => true})
-
end
-
-
1
def self.process_extra_attr_mappings!(link_def_poss_link,data)
-
matching_vars = data[:matching_vars]
-
return if matching_vars.nil? or matching_vars.empty?
-
matching_vars.each{|match|process_extra_attr_mapping!(link_def_poss_link,match,data)}
-
end
-
-
1
def self.hash_key_for_output_attr(exp_rsc_ps)
-
title_param = (exp_rsc_ps[:parameters]||[]).find{|exp|exp[:name] == "title"}
-
sanitize_attribute("#{exp_rsc_ps[:name]}--#{title_param[:value].to_s(:just_variable_name => true)}")
-
end
-
1
def self.augment_ext_ref_for_output_attr!(ext_ref,exp_rsc_ps)
-
title_param = (exp_rsc_ps[:parameters]||[]).find{|exp|exp[:name] == "title"}
-
ext_ref["resource_type"] = exp_rsc_ps[:name]
-
ext_ref["title_with_vars"] = title_param[:value].structured_form()
-
ext_ref
-
end
-
-
1
def self.hash_key_for_input_attr(imp_coll_ps)
-
attr_exprs = imp_coll_ps[:query].attribute_expressions()||[]
-
postfix = attr_exprs.map{|a|"#{a[:name]}__#{a[:value].to_s(:just_variable_name => true)}"}.join("--")
-
sanitize_attribute("#{imp_coll_ps[:type]}--#{postfix}")
-
end
-
1
def self.augment_ext_ref_for_input_attr!(ext_ref,imp_coll_ps)
-
ext_ref["resource_type"] = imp_coll_ps[:type]
-
# TODO: think can deprecate
-
# ext_ref["import_coll_query"] = imp_coll_ps[:query].structured_form()
-
ext_ref
-
end
-
-
1
def self.param_values_to_s(params)
-
params.map{|p|SimpleOrderedHash.new([{:name => p[:name]},{:value => p[:value].to_s}])}
-
end
-
1
def self.attr_expr_values_to_s(attr_exprs)
-
attr_exprs.map{|a|SimpleOrderedHash.new([{:name => a[:name]},{:op => a[:op]},{:value => a[:value].to_s}])}
-
end
-
-
-
1
def self.content_variables_in_output_var(exp_rsc_ps,attr_meta)
-
content = (exp_rsc_ps[:parameters]||[]).find{|exp|exp[:name] == "content"}
-
return Array.new unless content and content[:value]
-
-
if template = content[:value].template?()
-
pp "debug: handle content with template #{template.to_s}"
-
return Array.new
-
end
-
ret = content[:value].variable_list()
-
return Array.new if ret.empty?
-
# prune variables that appear already; need parent source
-
existing_attr_names = (attr_meta.parent_source||{})[:attributes].map{|a|a[:name]}
-
ret.reject{|v|existing_attr_names.include?(v)}
-
end
-
end
-
1
# Storeconfig handler for 'file' exported resources; derives extra
# attribute mappings from matched 'tag' variables.
class FileERH < StoreConfigHandler
  # Appends an input->output attribute mapping for a 'tag' match where
  # both sides are variables.
  # NOTE: the ||= below runs before the guard, so :attribute_mappings is
  # initialized on link_def_poss_link even when no mapping is added.
  def self.process_extra_attr_mapping!(link_def_poss_link,match,data)
    attr_mappings = link_def_poss_link[:attribute_mappings] ||= MetaArray.new
    return unless match[:name] == "tag" and match[:input_var].is_variable? and match[:output_var].is_variable?
    input_component = data[:attr_imp_coll].parent.hash_key
    input = {:component => input_component,:attribute => match[:input_var][:value]}
    output_component = data[:attr_exp_rsc].parent.hash_key
    output = {:component => output_component,:attribute => match[:output_var][:value]}
    attr_mappings << link_def_poss_link.create_attribute_mapping(input,output)
  end
end
-
end
-
end; end; end
-
2
module DTK; class ModuleDSL
-
1
class IncrementalGenerator
-
1
def self.generate(aug_object)
-
klass(aug_object).new().generate(ObjectWrapper.new(aug_object))
-
end
-
-
1
def self.merge_fragment_into_full_hash!(full_hash,object_class,fragment,context={})
-
klass(object_class).new().merge_fragment!(full_hash,fragment,context)
-
full_hash
-
end
-
-
1
private
-
1
def self.klass(object_or_class)
-
klass = (object_or_class.kind_of?(Class) ? object_or_class : object_or_class.class)
-
class_last_part = klass.to_s.split('::').last
-
ret = nil
-
begin
-
ret = const_get class_last_part
-
rescue
-
raise Error.new("Generation of type (#{class_last_part}) not treated")
-
end
-
ret
-
end
-
-
1
def set?(key,content,obj)
-
val = obj[key]
-
unless val.nil?
-
content[key.to_s] = val
-
end
-
end
-
-
1
def component_fragment(full_hash,component_template)
-
unless component_type = component_template && component_template.get_field?(:component_type)
-
raise Error.new("The method merge_fragment needs the context :component_template")
-
end
-
component().get_fragment(full_hash,component_type)
-
end
-
-
1
# Thin read-only facade over a model object; required() raises when a
# key that must be present resolves to nil.
class ObjectWrapper
  attr_reader :object

  def initialize(object)
    @object = object
  end

  # Fetches +key+ from the wrapped object, raising Error on a nil value.
  def required(key)
    value = @object[key]
    if value.nil?
      raise Error.new("Expected that object of type (#{@object}) has non null key (#{key})")
    end
    value
  end

  # Plain (nil-tolerant) key lookup.
  def [](key)
    @object[key]
  end
end
-
end
-
end; end
-
2
module DTK; class ModuleDSL
-
1
class ObjectModelForm
-
1
extend Aux::CommonClassMixin
-
-
1
def self.convert(input_hash)
-
new.convert(input_hash)
-
end
-
-
1
private
-
1
def convert_to_hash_form(obj,&block)
-
self.class.convert_to_hash_form(obj,&block)
-
end
-
1
def self.convert_to_hash_form(obj,&block)
-
if obj.kind_of?(Hash)
-
obj.each_pair{|k,v|block.call(k,v)}
-
else
-
obj = [obj] unless obj.kind_of?(Array)
-
obj.each do |el|
-
if el.kind_of?(Hash)
-
block.call(el.keys.first,el.values.first)
-
else #el.kind_of?(String)
-
block.call(el,Hash.new)
-
end
-
end
-
end
-
end
-
-
1
ModCmpDelim = "__"
-
1
CmpPPDelim = '::'
-
1
def convert_to_internal_cmp_form(cmp)
-
self.class.convert_to_internal_cmp_form(cmp)
-
end
-
1
def self.convert_to_internal_cmp_form(cmp)
-
cmp.gsub(Regexp.new(CmpPPDelim),ModCmpDelim)
-
end
-
# TODO: above should call DTK::Component methods and not need the Constants here
-
1
def component_print_form(cmp_internal_form)
-
self.class.component_print_form(cmp_internal_form)
-
end
-
1
def self.component_print_form(cmp_internal_form)
-
::DTK::Component.display_name_print_form(cmp_internal_form)
-
end
-
-
1
def matching_key?(key_or_keys,input_hash)
-
if key_or_keys.kind_of?(Array)
-
keys = key_or_keys
-
if match = keys.find{|k|input_hash.has_key?(k)}
-
input_hash[match]
-
end
-
else
-
key = key_or_keys
-
input_hash[key]
-
end
-
end
-
-
1
# Hash wrapper for parsed DSL input: nested hashes are recursively
# wrapped as InputHash so req() can be used at any depth.
class InputHash < Hash
  def initialize(hash={})
    replace(convert(hash)) unless hash.empty?()
  end

  # Required lookup; keys are normalized to strings. Raises
  # ParsingError::MissingKey when the key is absent.
  def req(key)
    string_key = key.to_s
    raise ParsingError::MissingKey.new(string_key) unless has_key?(string_key)
    self[string_key]
  end

  private

  # Recursively wraps hashes (as InputHash) and walks arrays; scalars
  # pass through untouched.
  def convert(item)
    case item
      when Hash
        item.inject(InputHash.new){|acc,(k,v)|acc.merge(k => convert(v))}
      when Array
        item.map{|el|convert(el)}
      else
        item
    end
  end
end
-
-
1
# Hash used while building rendered output; set_if_not_nil avoids
# writing nil-valued entries.
class OutputHash < Hash
  def initialize(hash={})
    replace(hash) unless hash.empty?()
  end

  # Assigns +val+ under +key+ only when +val+ is non-nil.
  def set_if_not_nil(key,val)
    return if val.nil?
    self[key] = val
  end
end
-
-
end
-
end; end
-
-
1
module DTK
-
1
class ModuleDSL
-
1
class ParsingError < ErrorUsage::Parsing
-
1
r8_nested_require('parsing_error','ref_component_templates')
-
1
r8_nested_require('parsing_error','link_def')
-
1
r8_nested_require('parsing_error','dependency')
-
1
r8_nested_require('parsing_error','missing_key')
-
1
r8_nested_require('parsing_error','illegal_keys')
-
-
1
# Prefixes every component-dsl parsing error with ErrorPrefix and tags
# it with caller info before delegating to the ErrorUsage::Parsing chain.
def initialize(msg='',*args_x)
  args = Params.add_opts(args_x,:error_prefix => ErrorPrefix,:caller_info => true)
  super(msg,*args)
end
ErrorPrefix = 'Component dsl parsing error'
-
-
1
class MissingFromModuleRefs < self
-
1
def initialize(params={})
-
missing_modules = params[:modules]
-
what = (missing_modules.size==1 ? "component module" : "component modules")
-
is = (missing_modules.size==1 ? "is" : "are")
-
does = (missing_modules.size==1 ? "does" : "do")
-
refs = missing_modules.join(',')
-
-
err_msg = "The following #{what} (#{refs}) that #{is} referenced in includes section #{does} not exist in module refs file; this can be rectified by invoking the 'push' command after manually adding appropriate component module(s) to module refs file or by removing references in the DSL file(s)"
-
# err_msg = "Component module(s) (?name) referenced in includes section are not specified in module refs file"
-
err_params = Params.new(:modules => params[:modules].join(','))
-
super(err_msg,err_params)
-
end
-
end
-
-
1
class BadNamespaceReference < self
-
1
def initialize(params={})
-
err_msg = "Namespace (?name) referenced in module_refs file does not exist in local environment"
-
err_params = Params.new(:name => params[:name])
-
super(err_msg,err_params)
-
end
-
end
-
-
1
class BadPuppetDefinition < self
-
1
def initialize(params={})
-
component = params[:component]
-
invalid_names = params[:invalid_names]
-
# missing_req_or_def = params[:missing_req_or_def]
-
-
if invalid_names
-
err_msg =
-
(invalid_names.size == 0) ? "The following component (?name) that is mapped to puppet definition does not have designated name attribute"
-
: "The following component (?name) that is mapped to puppet definition has multiple attributes designated as being the puppet definition name"
-
# elsif missing_req_or_def
-
# err_msg = "The following component (?name) that is mapped to puppet definition has name attribute that is not marked as required or does not have default value"
-
end
-
-
err_params = Params.new(:name => params[:component])
-
super(err_msg,err_params)
-
end
-
end
-
-
1
class AmbiguousModuleRef < self
-
1
def initialize(params={})
-
err_msg = "Reference to ?module_type module (?module_name) is ambiguous; it belongs to the namespaces (?namespaces); one of these namespaces should be selected by editing the module_refs file"
-
-
err_params = Params.new(
-
:module_type => params[:module_type],
-
:module_name => params[:module_name],
-
:namespaces => params[:namespaces].join(',')
-
)
-
super(err_msg,err_params)
-
end
-
end
-
-
end
-
end
-
end
-
-
-
1
module DTK
-
1
class ModuleDSL
-
1
class ParsingError
-
1
class Dependency < self
-
1
def self.create(msg,dep_choice,*args)
-
dep = (dep_choice.respond_to?(:print_form) ? dep_choice.print_form() : dep_choice)
-
hash_params = {
-
:base_cmp => dep_choice.base_cmp_print_form(),
-
:dep_cmp => dep_choice.dep_cmp_print_form(),
-
:dep => dep
-
}
-
create_with_hash_params(msg,hash_params,*args)
-
end
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ModuleDSL
-
1
class ParsingError
-
1
class IllegalKeys < self
-
1
def initialize(key_or_keys)
-
keys = (key_or_keys.kind_of?(Array) ? key_or_keys : [key_or_keys])
-
super(keys.size == 1 ? "illegal key (#{keys.first})" : "illegal keys (#{keys.join(',')})")
-
end
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ModuleDSL
-
1
class ParsingError
-
1
class LinkDef < self
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ModuleDSL
-
1
class ParsingError
-
1
class MissingKey < self
-
1
def initialize(key)
-
super("missing key (#{key})")
-
end
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ModuleDSL
-
1
class ParsingError
-
1
class RefComponentTemplates < self
-
1
def initialize(ref_cmp_templates)
-
super(err_msg(ref_cmp_templates))
-
@ref_cmp_templates = ref_cmp_templates
-
end
-
-
1
private
-
1
def err_msg(ref_cmp_templates)
-
msgs_per_cmp_template = msgs_per_cmp_template(ref_cmp_templates)
-
ident = " "
-
ref_errors = ident + msgs_per_cmp_template.join("\n#{ident}")
-
size = msgs_per_cmp_template.size
-
what = (size==1 ? "component template" : "component templates")
-
"The result if the changes were made would be the following #{what}\n would be deleted while still being referenced by existing assembly templates:\n#{ref_errors}"
-
end
-
-
1
def msgs_per_cmp_template(ref_cmp_templates)
-
ref_cmp_templates.map do |ref_cmp_template|
-
cmp_tmpl_name = ref_cmp_template[:component_template].display_name_print_form
-
assembly_templates = ref_cmp_template[:assembly_templates]
-
Assembly::Template.augment_with_namespaces!(assembly_templates)
-
assembly_templates.map do |assembly_template|
-
assembly_template_name = Assembly::Template.pretty_print_name(assembly_template,:include_namespace=>true)
-
"Component Template (#{cmp_tmpl_name}) is referenced by assembly template (#{assembly_template_name})"
-
end
-
end.flatten(1)
-
end
-
end
-
-
end
-
end
-
end
-
-
-
1
module DTK
-
1
class ModuleDSL
-
1
class RefIntegrity
-
1
r8_nested_require('ref_integrity','snapshot')
-
-
1
def initialize(component_module)
-
@component_module = component_module
-
@snapshot = Snapshot.new(component_module)
-
end
-
1
private :initialize
-
-
1
def self.snapshot_associated_assembly_templates(component_module)
-
new(component_module)
-
end
-
-
1
def raise_error_if_any_violations(opts={})
-
raise_error_if_dangling_cmp_ref(opts)
-
raise_error_if_dangling_port_link()
-
# raise_error_if_dangling_cmp_attr_ref()
-
end
-
-
1
def integrity_post_processing()
-
add_new_ports_on_component_templates()
-
end
-
-
1
def raise_error_if_missing_from_module_refs(include_modules,module_refs_modules={})
-
if inc_modules = include_modules['includes']
-
missing = []
-
ref_component_modules = module_refs_modules.component_modules.keys
-
inc_modules.each do |im|
-
missing << im unless ref_component_modules.include?(im.to_sym)
-
end
-
-
raise ParsingError::MissingFromModuleRefs.new(:modules => missing) unless missing.empty?
-
end
-
end
-
-
1
private
-
1
def raise_error_if_dangling_cmp_ref(opts={})
-
referenced_cmp_template_ids = @snapshot.component_template_ids()
-
return if referenced_cmp_template_ids.empty?
-
# this is called within transaction after any deletes are performed (if any)
-
# TODO: have ModuleDSL.update_model return if any deletes
-
# below is the conservative thing to do if dont know if any deletes
-
any_deletes = true
-
any_deletes = false if opts[:no_deletes_performed]
-
return unless any_deletes
-
-
sp_hash = {
-
:cols => [:id,:display_name,:group_id],
-
:filter => [:oneof, :id, referenced_cmp_template_ids]
-
}
-
cmp_template_ids_still_present = Model.get_objs(model_handle(:component),sp_hash).map{|r|r[:id]}
-
referenced_cmp_templates = @snapshot.referenced_cmp_templates(cmp_template_ids_still_present)
-
unless referenced_cmp_templates.empty?
-
raise ParsingError::RefComponentTemplates.new(referenced_cmp_templates)
-
end
-
end
-
-
1
def raise_error_if_dangling_port_link()
-
# TODO: stub
-
end
-
-
1
def add_new_ports_on_component_templates()
-
# find all assembly templates that reference a component template that has a new link def added
-
# this is done by taking a new snapshot (one that is post changes) and seeing in any new link defs
-
new_snapshot = Snapshot.new(@component_module)
-
snapshot_link_def_ids = @snapshot.link_defs.map{|ld|ld[:id]}
-
new_links_defs = new_snapshot.link_defs.reject{|ld|snapshot_link_def_ids.include?(ld[:id])}
-
unless new_links_defs.empty?
-
link_def_info = new_snapshot.create_link_def_info(new_links_defs)
-
ServiceModule::PortProcessing.create_assembly_template_ports?(link_def_info)
-
end
-
end
-
-
1
def model_handle(model_name)
-
@component_module.model_handle(model_name)
-
end
-
end
-
end
-
end
-
2
module DTK; class ModuleDSL
-
1
class RefIntegrity
-
1
class Snapshot
-
1
attr_reader :link_defs
-
1
def initialize(component_module)
-
@component_module = component_module
-
# aug_cmp_templates is an array of component templates with component ref info augmented onto them
-
@aug_cmp_templates = get_aug_cmp_templates(component_module)
-
@ports = get_ports(@aug_cmp_templates)
-
@port_links = get_port_links(@ports)
-
@link_defs = get_link_defs(@aug_cmp_templates)
-
end
-
-
1
def component_template_ids(aug_cmp_templates=nil)
-
aug_cmp_templates ||= @aug_cmp_templates
-
aug_cmp_templates.map{|cmp_template|cmp_template.id()}
-
end
-
-
1
def referenced_cmp_templates(exclude_cmp_template_ids)
-
pruned_aug_cmp_templates = @aug_cmp_templates.reject{|ct|exclude_cmp_template_ids.include?(ct[:id])}
-
ReferencedComponentTemplates.new(pruned_aug_cmp_templates)
-
end
-
-
1
def create_link_def_info(new_links_defs)
-
link_def_info = LinkDef::Info.new
-
-
# link defs indexed by component template
-
ndx_link_defs = new_links_defs.inject(Hash.new) do |h,ld|
-
h.merge(ld[:component_component_id] => ld)
-
end
-
-
@aug_cmp_templates.each do |cmp_template|
-
if link_def = ndx_link_defs[cmp_template[:id]]
-
cmp_template[:component_refs].each do |cmp_ref|
-
node = cmp_ref[:node]
-
assembly_template = cmp_ref[:assembly_template]
-
el = assembly_template.merge(
-
:node => node,
-
:component_ref => cmp_ref.hash_subset(*LinkDef::Info.component_ref_cols()),
-
:nested_component => cmp_template.hash_subset(*LinkDef::Info.nested_component_cols()),
-
:link_def => link_def
-
)
-
link_def_info << el
-
end
-
end
-
end
-
link_def_info.add_link_def_links!()
-
end
-
-
1
private
-
# the get functions are written so they can be passed an explicit reference object or fall back to the internal @ instance variables
-
1
def get_aug_cmp_templates(component_module=nil)
-
component_module ||= @component_module
-
component_module.get_associated_assembly_cmp_refs()
-
end
-
1
def get_link_defs(aug_cmp_templates=nil)
-
aug_cmp_templates ||= @aug_cmp_templates
-
ret = Array.new
-
cmp_template_ids = component_template_ids(aug_cmp_templates)
-
if cmp_template_ids.empty?
-
return ret
-
end
-
sp_hash = {
-
:cols => LinkDef.common_columns()+[:ref,:component_component_id],
-
:filter => [:oneof,:component_component_id,cmp_template_ids]
-
}
-
Model.get_objs(model_handle(:link_def),sp_hash,:keep_ref_cols => true)
-
end
-
1
def get_ports(aug_cmp_templates=nil)
-
aug_cmp_templates ||= @aug_cmp_templates
-
ret = Array.new
-
cmp_template_ids = component_template_ids(aug_cmp_templates)
-
if cmp_template_ids.empty?
-
return ret
-
end
-
sp_hash = {
-
:cols => [:id,:group_id,:ref,:display_name,:component_id,:node_node_id,:node],
-
:filter => [:oneof,:component_id,cmp_template_ids]
-
}
-
Model.get_objs(model_handle(:port),sp_hash,:keep_ref_cols => true)
-
end
-
1
def get_port_links(ports=nil)
-
ports ||= @ports
-
ret = Array.new
-
if ports.empty?
-
return ret
-
end
-
port_ids = ports.map{|p|p[:id]}
-
sp_hash = {
-
:cols => [:id,:group_id,:input_id,:output_id],
-
:filter => [:or, [:oneof,:input_id,port_ids],[:oneof,:output_id,port_ids]]
-
}
-
Model.get_objs(model_handle(:port_link),sp_hash)
-
end
-
-
1
def model_handle(model_name)
-
@component_module.model_handle(model_name)
-
end
-
-
1
class ReferencedComponentTemplates < Array
-
1
def initialize(aug_cmp_templates)
-
super(ref_cmp_templates(aug_cmp_templates))
-
end
-
1
private
-
1
def ref_cmp_templates(aug_cmp_templates)
-
ret = Array.new
-
if aug_cmp_templates.empty?
-
return ret
-
end
-
ndx_ret = Hash.new
-
aug_cmp_templates.each do |cmp_tmpl|
-
ndx = cmp_tmpl[:id]
-
cmp_tmpl[:component_refs].map do |aug_cmp_ref|
-
pntr = ndx_ret[ndx] ||= {:component_template => aug_cmp_ref.hash_subset(*CmpTemplateCols), :assembly_templates => Array.new}
-
existing_assembly_templates = pntr[:assembly_templates]
-
assembly_template = aug_cmp_ref[:assembly_template]
-
assembly_template_id = assembly_template[:id]
-
unless existing_assembly_templates.find{|assem|assem[:id] == assembly_template_id}
-
existing_assembly_templates<< assembly_template
-
end
-
end
-
end
-
ndx_ret.values
-
end
-
1
CmpTemplateCols = [:id,:display_name,:group_id,:component_type,:version=,:module_branch_id]
-
end
-
end
-
end
-
end; end
-
-
2
module DTK; class ModuleDSL
-
1
module UpdateModelMixin
-
1
def update_model(opts={})
-
input_hash =
-
if opts.empty?
-
@input_hash
-
elsif opts[:version]
-
modify_for_version_and_override_attrs(@input_hash,opts[:version],opts[:override_attrs])
-
else
-
add_component_override_attrs(@input_hash,opts[:override_attrs])
-
end
-
-
self.class.add_components_from_dsl(@project_idh,@config_agent_type,@impl_idh,input_hash,nil,opts)
-
end
-
-
1
class Parser
-
1
def initialize(impl_idh,module_branch_idh,project_idh)
-
@impl_idh = impl_idh
-
@module_branch_idh = module_branch_idh
-
@project_idh = project_idh
-
@remote_link_defs = Hash.new
-
@components_hash = Hash.new
-
@stored_components_hash = Hash.new
-
end
-
1
attr_reader :components_hash,:stored_components_hash
-
-
1
def parse_components!(config_agent_type,dsl_hash,namespace)
-
impl_id = impl_idh.get_id()
-
module_branch_id = module_branch_idh.get_id()
-
-
@components_hash = dsl_hash.inject({}) do |h, (r8_hash_cmp_ref,cmp_info)|
-
cmp_ref = component_ref(config_agent_type,r8_hash_cmp_ref,namespace)
-
info = Hash.new
-
cmp_info.each do |k,v|
-
case k
-
# TODO: deprecate this case when remove v1
-
when "external_link_defs"
-
v.each{|ld|(ld["possible_links"]||[]).each{|pl|pl.values.first["type"] = "external"}} #TODO: temp hack to put in type = "external"
-
parsed_link_def = LinkDef.parse_serialized_form_local(v,config_agent_type,@remote_link_defs,cmp_ref)
-
(info["link_def"] ||= Hash.new).merge!(parsed_link_def)
-
when "link_defs"
-
parsed_link_def = LinkDef.parse_serialized_form_local(v,config_agent_type,@remote_link_defs,cmp_ref)
-
(info["link_def"] ||= Hash.new).merge!(parsed_link_def)
-
else
-
info[k] = v
-
end
-
end
-
info.merge!("implementation_id" => impl_id, "module_branch_id" => module_branch_id)
-
h.merge(cmp_ref => info)
-
end
-
end
-
-
1
private
-
1
attr_reader :impl_idh, :module_branch_idh,:project_idh
-
-
1
def component_ref_from_cmp_type(config_agent_type,component_type)
-
"#{config_agent_type}-#{component_type}"
-
end
-
1
def component_ref(config_agent_type,hash_cmp_ref,namespace)
-
ref_wo_ns = "#{config_agent_type}-#{hash_cmp_ref}"
-
Namespace.join_namespace(namespace, ref_wo_ns)
-
end
-
-
end
-
-
1
private
-
1
def add_component_override_attrs(input_hash,override_attrs)
-
if override_attrs
-
input_hash.keys.inject(Hash.new()) do |h,k|
-
h.merge(k => input_hash[k].merge(override_attrs))
-
end
-
else
-
input_hash
-
end
-
end
-
-
1
def modify_for_version_and_override_attrs(input_hash,version,override_attrs)
-
(override_attrs ||= {})["version"] ||= version
-
-
input_hash.keys.inject(Hash.new()) do |h,k|
-
cmp_info = input_hash[k]
-
modified_cmp_info = cmp_info.merge(override_attrs).merge("display_name" => Component.name_with_version(cmp_info["display_name"],version))
-
h.merge(Component.ref_with_version(k,version) => modified_cmp_info)
-
end
-
end
-
end
-
-
1
module UpdateModelClassMixin
-
1
def add_components_from_dsl(project_idh,config_agent_type,impl_idh,dsl_hash,dsl_integer_version=nil,opts={})
-
dsl_integer_version ||= integer_version(dsl_integer_version)
-
module_branch_idh = impl_idh.create_object().get_module_branch().id_handle()
-
parser_proc = create_parser_processor(dsl_integer_version,impl_idh,module_branch_idh,project_idh)
-
parser_proc.parse_components!(config_agent_type,dsl_hash,opts[:namespace])
-
cmps_hash = parser_proc.components_hash()
-
stored_cmps_hash = parser_proc.stored_components_hash()
-
-
# data_source_update_hash form is used so subcomponents can be annotated as "complete", which causes items that were removed to be deleted
-
db_update_hash = db_update_form(cmps_hash,stored_cmps_hash,module_branch_idh)
-
Model.input_hash_content_into_model(project_idh,db_update_hash)
-
sp_hash = {
-
:cols => [:id,:display_name],
-
:filter => [:and,[:oneof,:ref,cmps_hash.keys],[:eq,:project_project_id,project_idh.get_id()]]
-
}
-
Model.get_objs(project_idh.create_childMH(:component),sp_hash).map{|r|r.id_handle()}
-
end
-
-
1
private
-
1
def create_parser_processor(dsl_integer_version,impl_idh,module_branch_idh,project_idh)
-
klass = load_and_return_version_adapter_class(dsl_integer_version)
-
klass.const_get("Parser").new(impl_idh,module_branch_idh,project_idh)
-
end
-
-
# Marks applicable objects as complete so that objects not present in cmps_input_hash are deleted
-
1
def db_update_form(cmps_input_hash,non_complete_cmps_input_hash,module_branch_idh)
-
mark_as_complete_constraint = {
-
:module_branch_id=>module_branch_idh.get_id(), #so only delete extra components that belong to same module
-
:node_node_id => nil #so only delete component templates and not component instances
-
}
-
cmp_db_update_hash = cmps_input_hash.inject(DBUpdateHash.new) do |h,(ref,hash_assigns)|
-
h.merge(ref => db_update_form_aux(:component,hash_assigns))
-
end.mark_as_complete(mark_as_complete_constraint)
-
{"component" => cmp_db_update_hash.merge(non_complete_cmps_input_hash)}
-
end
-
-
1
def db_update_form_aux(model_name,hash_assigns)
-
ret = DBUpdateHash.new
-
children_model_names = DB_REL_DEF[model_name][:one_to_many]||[]
-
hash_assigns.each do |key,child_hash|
-
key = key.to_sym
-
if children_model_names.include?(key)
-
child_model_name = key
-
ret[key] = child_hash.inject(DBUpdateHash.new) do |h,(ref,child_hash_assigns)|
-
h.merge(ref => db_update_form_aux(child_model_name,child_hash_assigns))
-
end
-
ret[key].mark_as_complete()
-
else
-
ret[key] = child_hash
-
end
-
end
-
# mark as complete any child that does not appear in hash_assigns
-
(children_model_names - hash_assigns.keys.map{|k|k.to_sym}).each do |key|
-
ret[key] = DBUpdateHash.new().mark_as_complete()
-
end
-
ret
-
end
-
end
-
end; end
-
-
# TODO: this needs to be updated after refactor to parse_and_update_model
-
1
module DTK
-
1
class NodeModuleDSL < ModuleDSL
-
1
r8_nested_require('node_module_dsl','update_model')
-
1
include UpdateModelMixin
-
-
1
def self.parse_and_update_model(node_module,impl_obj,module_branch_idh,version=nil,opts={})
-
# get associated assembly templates before doing any updates, and use them to check for referential integrity
-
# problems within the transaction after the update; the transaction is aborted if any errors are found
-
Transaction do
-
node_module_dsl_obj = create_dsl_object_from_impl(impl_obj, opts)
-
raise node_module_dsl_obj if ParsingError.is_error?(node_module_dsl_obj)
-
-
update_opts = {:override_attrs => {"module_branch_id" => module_branch_idh.get_id()}}
-
update_opts.merge!(:version => version) if version
-
node_module_dsl_obj.update_model(update_opts)
-
end
-
end
-
-
# creates a ModuleDSL if file_obj_hash is a dtk meta file
-
1
def self.create_from_file_obj_hash(target_impl,dsl_filename,content,opts={})
-
container_idh = opts[:container_idh]
-
return nil unless isa_dsl_filename?(dsl_filename)
-
parsed_name = parse_dsl_filename(dsl_filename)
-
module_branch_idh = target_impl.get_module_branch().id_handle()
-
opts[:file_path] = dsl_filename
-
input_hash = convert_to_hash(content,parsed_name[:format_type],opts)
-
return input_hash if ParsingError.is_error?(input_hash)
-
ParsingError.trap do
-
new(target_impl.id_handle(),input_hash,module_branch_idh,container_idh)
-
end
-
end
-
-
1
private
-
1
def initialize(impl_idh,version_specific_input_hash,module_branch_idh,container_idh)
-
@input_hash = version_parse_check_and_normalize(version_specific_input_hash)
-
@impl_idh = impl_idh
-
@module_branch_idh = module_branch_idh
-
@container_idh = container_idh
-
end
-
-
# There is just one version now for node modules
-
1
IntegerVersion = 1
-
1
def version_parse_check_and_normalize(version_specific_input_hash)
-
klass = self.class.load_and_return_version_adapter_class(IntegerVersion)
-
# parse_check raises errors if any errors found
-
klass.parse_check(version_specific_input_hash)
-
klass.normalize(version_specific_input_hash)
-
end
-
# Set for load_and_return_version_adapter_class
-
1
def self.adapter_type()
-
"NodeModuleDSL"
-
end
-
1
def self.adapter_dir()
-
"node_module_dsl"
-
end
-
end
-
end
-
-
2
module DTK; class NodeModuleDSL
-
1
module UpdateModelMixin
-
1
def update_model(opts={})
-
pp [:info_to_insert,@input_hash]
-
Log.info("Here code is written that inserts that contents of @input_hash into objects of the form node_image")
-
raise ErrorUsage.new("got here; place where objects must be inserted")
-
# TODO:
-
# db_update_hash = ...
-
# TODO: this would do the actual db insert
-
# Model.input_hash_content_into_model(@project_idh,db_update_hash)
-
end
-
end
-
end; end
-
-
1
module DTK
-
1
class ModuleDSLInfo < Hash
-
1
def initialize(hash={})
-
super()
-
replace(hash)
-
end
-
1
def dsl_parse_error=(dsl_parse_error)
-
merge!(:dsl_parse_error => dsl_parse_error)
-
dsl_parse_error
-
end
-
1
def dsl_created_info=(dsl_created_info)
-
merge!(:dsl_created_info => dsl_created_info)
-
dsl_created_info
-
end
-
1
def dsl_updated_info=(dsl_updated_info)
-
merge!(:dsl_updated_info => dsl_updated_info)
-
dsl_updated_info
-
end
-
-
1
def set_external_dependencies?(ext_deps)
-
if ext_deps
-
self[:external_dependencies] ||= ext_deps
-
end
-
end
-
-
1
class Info < Hash
-
1
def initialize(hash={})
-
raise_error_if_illegal_keys(hash.keys)
-
super()
-
replace(hash)
-
end
-
1
def merge(hash)
-
raise_error_if_illegal_keys(hash.keys)
-
super(hash)
-
end
-
1
def merge!(hash)
-
raise_error_if_illegal_keys(hash.keys)
-
super(hash)
-
end
-
1
private
-
1
def raise_error_if_illegal_keys(keys)
-
illegal_keys = keys - legal_keys
-
unless illegal_keys.empty?
-
raise Error.new("Illegal keys (#{illegal_keys.join(',')})")
-
end
-
end
-
end
-
-
# has info about a DSL file that is being generated
-
1
class CreatedInfo < Info
-
1
private
-
1
def legal_keys()
-
[:path,:content,:hash_content]
-
end
-
end
-
# has info about a DSL file that is being updated
-
1
class UpdatedInfo < Info
-
1
private
-
1
def legal_keys()
-
[:msg,:commit_sha]
-
end
-
end
-
end
-
end
-
# TODO: this will undergo big changes in Jan 2015
-
1
module DTK
-
1
class NodeModule < BaseModule
-
-
1
def self.model_type()
-
:node_module
-
end
-
-
1
def self.component_type()
-
:puppet #hardwired
-
end
-
-
1
def component_type()
-
:puppet #hardwired
-
end
-
-
1
def self.module_specific_type(config_agent_type)
-
config_agent_type
-
end
-
-
1
class DSLParser < DTK::ModuleDSLParser
-
1
def self.module_type()
-
:node_module
-
end
-
1
def self.module_class
-
ModuleDSL
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ServiceModule < Model
-
1
r8_nested_require('service','dsl')
-
1
r8_nested_require('service','service_add_on')
-
1
r8_nested_require('module','auto_import')
-
-
1
extend ModuleClassMixin
-
1
extend AutoImport
-
1
include ModuleMixin
-
1
extend DSLClassMixin
-
1
include DSLMixin
-
1
include ModuleRefs::Mixin
-
-
### standard get methods
-
1
def get_assemblies()
-
get_objs_helper(:assemblies,:component)
-
end
-
-
1
def get_augmented_assembly_nodes()
-
get_objs_helper(:assembly_nodes,:node,:augmented => true)
-
end
-
-
1
def get_referenced_component_refs()
-
ndx_ret = Hash.new
-
get_objs(:cols => [:component_refs]).each do |r|
-
cmp_ref = r[:component_ref]
-
ndx_ret[cmp_ref[:id]] ||= cmp_ref
-
end
-
ndx_ret.values
-
end
-
-
1
def assembly_ref(assembly_name,version_field=nil)
-
assembly_ref = Namespace.join_namespace(module_namespace(),"#{module_name()}-#{assembly_name}")
-
if version_field
-
assembly_ref = assembly_ref__add_version(assembly_ref,version_field)
-
end
-
assembly_ref
-
end
-
1
def assembly_ref__add_version(assembly_ref,version_field)
-
version = ModuleBranch.version_from_version_field(version_field)
-
"#{assembly_ref}--#{version}"
-
end
-
-
1
private :assembly_ref__add_version
-
-
1
def list_component_modules(opts=Opts.new)
-
get_referenced_component_modules(opts).sort{|a,b|a[:display_name] <=> b[:display_name]}
-
end
-
1
def get_referenced_component_modules(opts=Opts.new)
-
# TODO: alternative is to get this by getting the module_refs
-
ret = Array.new
-
cmp_refs = get_referenced_component_refs()
-
return ret if cmp_refs.empty?
-
project = get_project()
-
ret = ComponentRef.get_referenced_component_modules(project,cmp_refs)
-
-
if opts.array(:detail_to_include).include?(:versions)
-
ndx_versions = get_component_module_refs().version_objs_indexed_by_modules()
-
-
ret.each do |mod|
-
if version_obj = ndx_versions[mod.module_name()]
-
mod[:version] = version_obj
-
end
-
end
-
end
-
-
ret
-
end
-
-
### end: get methods
-
-
1
def self.model_type()
-
:service_module
-
end
-
-
1
def self.filter_list!(rows)
-
rows.reject!{|r|Workspace.is_workspace_service_module?(r)}
-
rows
-
end
-
-
1
def delete_object()
-
assembly_templates = get_assembly_templates()
-
-
assoc_assemblies = self.class.get_associated_target_instances(assembly_templates)
-
unless assoc_assemblies.empty?
-
assembly_names = assoc_assemblies.map{|a|a[:display_name]}
-
raise ErrorUsage.new("Cannot delete a service module if one or more of its service instances exist in a target (#{assembly_names.join(',')})")
-
end
-
repos = get_repos()
-
repos.each{|repo|RepoManager.delete_repo(repo)}
-
delete_instances(repos.map{|repo|repo.id_handle()})
-
-
# need to explicitly delete nodes since nodes' parents are not the assembly
-
Assembly::Template.delete_assemblies_nodes(assembly_templates.map{|a|a.id_handle()})
-
-
delete_instance(id_handle())
-
{:module_name => module_name}
-
end
-
-
1
def delete_version?(version,opts={})
-
delete_version(version,opts.merge(:no_error_if_does_not_exist=>true))
-
end
-
1
def delete_version(version,opts={})
-
ret = {:module_name => module_name()}
-
unless module_branch = get_module_branch_matching_version(version)
-
if opts[:no_error_if_does_not_exist]
-
return ret
-
else
-
raise ErrorUsage.new("Version '#{version}' for specified component module does not exist")
-
end
-
end
-
-
unless opts[:donot_delete_meta]
-
assembly_templates = module_branch.get_assemblies()
-
assoc_assemblies = self.class.get_associated_target_instances(assembly_templates)
-
unless assoc_assemblies.empty?
-
assembly_names = assoc_assemblies.map{|a|a[:display_name]}
-
raise ErrorUsage.new("Cannot delete a service module if one or more of its service instances exist in a target (#{assembly_names.join(',')})")
-
end
-
Assembly::Template.delete_assemblies_nodes(assembly_templates.map{|a|a.id_handle()})
-
end
-
-
id_handle = module_branch.id_handle()
-
module_branch.delete_instance(id_handle)
-
ret
-
end
-
-
1
def get_assembly_instances()
-
assembly_templates = get_assembly_templates()
-
assoc_assemblies = self.class.get_associated_target_instances(assembly_templates)
-
-
assoc_assemblies.each do |assoc_assembly|
-
assembly_template = assembly_templates.select{|at| at[:id]==assoc_assembly[:ancestor_id]}
-
nodes = assembly_template.first[:nodes]
-
assoc_assembly[:nodes] = nodes
-
end
-
end
-
-
1
def get_assembly_templates()
-
sp_hash = {
-
:cols => [:module_branches]
-
}
-
mb_idhs = get_objs(sp_hash).map{|r|r[:module_branch].id_handle()}
-
opts = {
-
:filter => [:oneof, :module_branch_id,mb_idhs.map{|idh|idh.get_id()}]
-
}
-
if project = get_project()
-
opts.merge!(:project_idh => project.id_handle())
-
end
-
ndx_ret = Assembly::Template.get(model_handle(:component),opts).inject(Hash.new){|h,r|h.merge(r[:id] => r)}
-
Assembly::Template.get_nodes(ndx_ret.values.map{|r|r.id_handle}).each do |node|
-
next if node.is_assembly_wide_node?()
-
assembly = ndx_ret[node[:assembly_id]]
-
(assembly[:nodes] ||= Array.new) << node
-
end
-
ndx_ret.values
-
end
-
-
1
def info_about(about)
-
case about
-
when "assembly-templates".to_sym
-
mb_idhs = get_objs(:cols => [:module_branches]).map{|r|r[:module_branch].id_handle()}
-
opts = {
-
:filter => [:oneof, :module_branch_id,mb_idhs.map{|idh|idh.get_id()}],
-
:detail_level => "nodes",
-
:no_module_prefix => true
-
}
-
if project = get_project()
-
opts.merge!(:project_idh => project.id_handle())
-
end
-
Assembly::Template.list(model_handle(:component),opts)
-
when :components
-
assembly_templates = get_assembly_templates
-
else
-
raise ErrorUsage.new("TODO: not implemented yet: processing of info_about(#{about})")
-
end
-
end
-
-
1
def self.get_project_trees(mh)
-
sp_hash = {
-
:cols => [:id,:display_name,:module_branches]
-
}
-
sm_branch_info = get_objs(mh,sp_hash)
-
-
ndx_targets = get_ndx_targets(sm_branch_info.map{|r|r[:module_branch].id_handle()})
-
mb_idhs = Array.new
-
ndx_ret = sm_branch_info.inject(Hash.new) do |h,r|
-
module_branch = r[:module_branch]
-
mb_idhs << module_branch.id_handle()
-
mb_id = module_branch[:id]
-
content = SimpleOrderedHash.new(
-
[
-
{:name => r.pp_module_branch_name(module_branch)},
-
{:id => mb_id},
-
{:targets => ndx_targets[mb_id]||Array.new},
-
{:assemblies => Array.new}
-
])
-
h.merge(mb_id => content)
-
end
-
-
filter = [:oneof, :module_branch_id,mb_idhs.map{|idh|idh.get_id()}]
-
assembly_mh = mh.createMH(:component)
-
Assembly::Template.list(assembly_mh,:filter => filter,:component_info=>true).each do |r|
-
index = r[:module_branch_id]
-
assemblies = ndx_ret[index][:assemblies]
-
assemblies << SimpleOrderedHash.new([{:name => r[:display_name]}, {:id => r[:id]}, {:nodes => format_for_get_project_trees__nodes(r[:nodes])}])
-
end
-
ndx_ret.values
-
end
-
# TODO: SimpleOrderedHash above and below was only used for debugging printout and could be removed
-
1
class << self
-
1
private
-
1
def format_for_get_project_trees__nodes(nodes)
-
nodes.map{|n|SimpleOrderedHash.new([{:name => n[:node_name]},{:id => n[:node_id]},{:components => format_for_get_project_trees__cmps(n[:components])}])}
-
end
-
-
1
def format_for_get_project_trees__cmps(cmps)
-
cmps.map{|cmp|SimpleOrderedHash.new([{:name => cmp[:component_name]},{:id => cmp[:component_id]},{:description => cmp[:description]}])}
-
end
-
end
-
-
# targets indexed by service_module
-
1
def self.get_ndx_targets(sm_branch_idhs)
-
# TODO: right now: putting in all targets for all service modules;
-
ret = Array.new
-
return ret if sm_branch_idhs.empty?
-
sm_branch_mh = sm_branch_idhs.first.createMH()
-
all_targets = Target.list(sm_branch_mh).map do |r|
-
SimpleOrderedHash.new([{:name => r[:display_name]},{:id => r[:id]},{:description => r[:description]}])
-
end
-
sm_branch_idhs.inject(Hash.new) do |h,sm_branch_idh|
-
h.merge(sm_branch_idh.get_id => all_targets)
-
end
-
end
-
-
1
def self.find(mh,service_module_name_full,library_idh=nil)
-
lib_filter = library_idh && [:and,:library_library_id,library_idh.get_id()]
-
sp_hash = {
-
:cols => [:id,:display_name,:library_library_id],
-
:filter => [:and, [:eq, :ref, service_module_name_full],lib_filter].compact
-
}
-
rows = get_objs(mh,sp_hash)
-
case rows.size
-
when 0 then nil
-
when 1 then rows.first
-
else raise ErrorUsage.new("Cannot find unique service module given service_module_name=#{service_module_name_full}")
-
end
-
end
-
-
1
def self.get_associated_target_instances(assembly_templates)
-
ret = Array.new
-
return ret if assembly_templates.empty?
-
sp_hash = {
-
:cols => [:id, :display_name, :ancestor_id],
-
:filter => [:oneof, :ancestor_id, assembly_templates.map{|r|r[:id]}]
-
}
-
mh = assembly_templates.first.model_handle(:component)
-
get_objs(mh,sp_hash)
-
end
-
-
# TODO: fix what this returns when fix what update_model_from_dsl returns
-
1
def pull_from_remote__update_from_dsl(repo, module_and_branch_info, version=nil)
-
info = module_and_branch_info #for succinctness
-
module_branch_idh = info[:module_branch_idh]
-
module_branch = module_branch_idh.create_object().merge(:repo => repo)
-
-
update_model_from_dsl(module_branch)
-
end
-
-
# returns either parsing error object or nil
-
1
def install__process_dsl(repo,module_branch,local,opts = {})
-
unless local.version.nil?
-
raise Error.new("Not implemented yet ServiceModule#import__dsl with version not equal to nil")
-
end
-
response = update_model_from_dsl(module_branch.merge(:repo => repo), opts) #repo added to avoid lookup in update_model_from_dsl
-
response if ParsingError.is_error?(response)
-
end
-
-
1
private
-
# returns the new module branch
-
1
def create_new_version__type_specific(repo_for_new_branch,new_version,opts={})
-
project = get_project()
-
repo_idh = repo_for_new_branch.id_handle()
-
module_and_branch_info = self.class.create_ws_module_and_branch_obj?(project,repo_idh,module_name(),new_version,module_namespace_obj())
-
module_branch_idh = module_and_branch_info[:module_branch_idh]
-
module_branch_idh.create_object()
-
end
-
-
# TODO: may want to fix up what this returns after fixing up what update_model_from_dsl returns
-
# returns dsl_info
-
1
def update_model_from_clone_changes(commit_sha,diffs_summary,module_branch,version,opts={})
-
if version.kind_of?(ModuleVersion::AssemblyModule)
-
assembly = version.get_assembly(model_handle(:component))
-
opts_finalize = Aux.hash_subset(opts,[:task_action])
-
AssemblyModule::Service.finalize_edit(assembly,opts[:modification_type],self,module_branch,diffs_summary,opts_finalize)
-
else
-
opts.merge!(:ret_dsl_updated_info => Hash.new)
-
response = update_model_from_dsl(module_branch,opts)
-
ret = ModuleDSLInfo.new()
-
if ParsingError.is_error?(response)
-
ret.dsl_parse_error = response
-
else
-
ret.merge!(response)
-
end
-
dsl_updated_info = opts[:ret_dsl_updated_info]
-
unless dsl_updated_info.empty?
-
ret.dsl_updated_info = dsl_updated_info
-
end
-
ret
-
end
-
end
-
-
1
def publish_preprocess_raise_error?(module_branch_obj)
-
# unless get_field?(:dsl_parsed)
-
unless module_branch_obj.dsl_parsed?()
-
raise ErrorUsage.new("Unable to publish module that has parsing errors. Please fix errors and try to publish again.")
-
end
-
-
# get module info for every component in an assembly in the service module
-
module_info = get_component_modules_info(module_branch_obj)
-
pp [:debug_publish_preprocess_raise_error,:module_info,module_info]
-
# check that all component modules are linked to a remote component module
-
=begin
-
# TODO: ModuleBranch::Location: removed linked_remote; taking out this check until have replacement
-
unlinked_mods = module_info.reject{|r|r[:repo].linked_remote?()}
-
unless unlinked_mods.empty?
-
raise ErrorUsage.new("Cannot export a service module that refers to component modules (#{unlinked_mods.map{|r|r[:display_name]}.join(",")}) not already exported")
-
end
-
=end
-
end
-
-
# returns [module_branch,component_modules]
-
1
def get_component_modules_info(module_branch)
-
filter = [:eq, :module_branch_id,module_branch[:id]]
-
component_templates = Assembly.get_component_templates(model_handle(:component),filter)
-
mb_mh = model_handle(:module_branch)
-
cmp_module_branch_idhs = component_templates.map{|r|r[:module_branch_id]}.uniq.map{|id|mb_mh.createIDH(:id => id)}
-
ModuleBranch.get_component_modules_info(cmp_module_branch_idhs)
-
end
-
end
-
end
-
1
module DTK
-
1
class ServiceModule
-
1
r8_nested_require('dsl','common')
-
1
r8_nested_require('dsl','assembly_import')
-
1
r8_nested_require('dsl','assembly_export')
-
1
r8_nested_require('dsl','parser')
-
1
r8_nested_require('dsl','parsing_error')
-
1
r8_nested_require('dsl','settings')
-
1
include SettingsMixin
-
-
1
# Maps between human-readable service DSL version strings (e.g. "1.0.0") and
# the internal integer versions used to select parser/serializer adapters.
module DSLVersionInfo
  # Default integer version from server config
  # (dsl.service.integer_version.default); nil when not configured.
  def self.default_integer_version()
    ret = R8::Config[:dsl][:service][:integer_version][:default]
    ret && ret.to_i
  end

  # Converts a version (string or symbol) to its integer version.
  # @raise [ErrorUsage] for unknown versions; opts[:file_path] is appended to
  #   the error message when given.
  def self.version_to_integer_version(version,opts={})
    unless integer_version = VersionToIntegerVersion[version.to_s]
      error_msg = "Illegal version (#{version}) found in assembly dsl file"
      if file_path = opts[:file_path]
        error_msg += " (#{file_path})"
      end
      raise ErrorUsage.new(error_msg)
    end
    integer_version
  end

  # Inverse mapping; returns nil for integer versions that predate version
  # stamping (1 and 2) or are unknown.
  def self.integer_version_to_version(integer_version)
    IntegerVersionToVersion[integer_version]
  end

  # frozen: these lookup tables are constants and must not be mutated
  VersionToIntegerVersion = {
    "0.9.1" => 3,
    "1.0.0" => 4
  }.freeze
  IntegerVersionToVersion = {
    1 => nil, # 1 and 2 do not have a version stamped in file
    2 => nil,
    3 => "0.9.1",
    4 => "1.0.0"
  }.freeze
end
-
-
1
# Class-level helpers for locating, naming, and deleting assembly DSL meta
# files within a service module repo. Supports both the current layout
# (assemblies/<name>.dtk.assembly.<ext>) and the legacy layout
# (assemblies/<name>/assembly.<ext>).
module DSLClassMixin
  # Deletes the assembly DSL file(s)/directory for the given assembly template
  # and pushes repo changes. Returns the workspace branch's module repo info
  # when a workspace branch was touched, otherwise nil.
  def delete_assembly_dsl?(assembly_template_idh)
    sp_hash = {
      :cols => [:display_name, :module_branch],
      :filter => [:eq,:id,assembly_template_idh.get_id()]
    }
    assembly_template_mh = assembly_template_idh.createMH()
    ndx_module_branches = Hash.new
    Assembly::Template.get_objs(assembly_template_mh,sp_hash).each do |r|
      module_branch = r[:module_branch]
      assembly_name = r[:display_name]

      assembly_path = assembly_meta_filename_path(assembly_name,module_branch)
      is_legacy = is_legacy_service_module_structure?(module_branch)

      # if not legacy structure, delete assembly_name.dtk.assembly.yaml file
      RepoManager.delete_file?(assembly_path,module_branch) unless is_legacy

      # raise Error.new("need to modify to componsate for fact that what is now needs to be deleted is files and possibly dir; these are assembly file (#{assembly_path}), plus possibly settings files")

      assembly_dir = assembly_meta_directory_path(assembly_name,module_branch)
      RepoManager.delete_directory?(assembly_dir,module_branch)

      ndx_module_branches[module_branch[:id]] ||= module_branch
    end
    ret = nil
    ndx_module_branches.each_value do |module_branch|
      RepoManager.push_changes(module_branch)
      if module_branch[:is_workspace]
        ret = module_branch.get_module_repo_info()
      end
    end
    ret
  end

  # Repo-relative path of the assembly dsl file for the branch's layout.
  def assembly_meta_filename_path(assembly_name,module_branch)
    file_type = dsl_files_format_type()
    if is_legacy_service_module_structure?(module_branch)
      "assemblies/#{assembly_name}/assembly.#{file_type}"
    else
      "assemblies/#{assembly_name}.dtk.assembly.#{file_type}"
    end
  end

  # Repo-relative directory that holds per-assembly auxiliary files.
  def assembly_meta_directory_path(assembly_name,module_branch)
    "assemblies/#{assembly_name}"
  end

  # Repo-relative path of a named workflow file for an assembly.
  def assembly_workflow_meta_filename_path(assembly_name,task_action)
    file_type = dsl_files_format_type()
    "assemblies/#{assembly_name}/workflows/#{task_action}.#{file_type}"
  end

  # returns [meta_files,regexp]
  def meta_files_and_regexp?(module_branch)
    meta_files,regexp,is_legacy_structure = meta_files_regexp_and_is_legacy?(module_branch)
    [meta_files,regexp]
  end

  # True when the branch only has meta files in the legacy directory layout.
  def is_legacy_service_module_structure?(module_branch)
    meta_files,regexp,is_legacy_structure = meta_files_regexp_and_is_legacy?(module_branch)
    is_legacy_structure
  end

  private

  # returns [meta_files,regexp,is_legacy_structure]; tries the current layout
  # first and falls back to the legacy layout.
  def meta_files_regexp_and_is_legacy?(module_branch)
    # determine if new structure or not
    is_legacy_structure = false
    meta_files,regexp = meta_files_and_regexp_aux?(AssemblyFilenamePathInfo,module_branch)
    if meta_files.empty?
      meta_files,regexp = meta_files_and_regexp_aux?(AssemblyFilenamePathInfoLegacy,module_branch)
      is_legacy_structure = !meta_files.empty?
    end
    [meta_files,regexp,is_legacy_structure]
  end

  # NOTE: regex literals are used so the dots match literally; the previous
  # double-quoted-string form lost the backslashes ("\." == "."), letting '.'
  # match any character and producing false positives.
  AssemblyFilenamePathInfo = {
    :regexp => %r{^assemblies/(.*)\.dtk\.assembly\.(json|yaml)$},
    :path_depth => 3
  }

  AssemblyFilenamePathInfoLegacy = {
    :regexp => %r{^assemblies/([^/]+)/assembly\.(json|yaml)$},
    :path_depth => 3
  }

  # Extracts the assembly name from a meta file path (either layout);
  # returns nil when the path matches neither pattern.
  def meta_file_assembly_name(meta_file_path)
    (meta_file_path.match(AssemblyFilenamePathInfo[:regexp])||[])[1] ||
      (meta_file_path.match(AssemblyFilenamePathInfoLegacy[:regexp])||[])[1]
  end
  public :meta_file_assembly_name

  # returns [meta_files, regexp]; lists repo files to the layout's depth and
  # keeps the ones matching the layout's pattern.
  def meta_files_and_regexp_aux?(assembly_dsl_path_info,module_branch)
    depth = assembly_dsl_path_info[:path_depth]
    meta_files = RepoManager.ls_r(depth,{:file_only => true},module_branch)
    regexp = assembly_dsl_path_info[:regexp]
    [meta_files.select{|f|f =~ regexp},regexp]
  end

  # Configured dsl file format ("json" or "yaml"); raises on anything else.
  def dsl_files_format_type()
    format_type_default = R8::Config[:dsl][:service][:format_type][:default]
    case format_type_default
    when "json" then "json"
    when "yaml" then "yaml"
    else raise Error.new("Unexpected value for dsl.service.format_type.default: #{format_type_default}")
    end
  end
end
-
-
1
module DSLMixin
-
# TODO: fix what update_model_from_dsl returns by looking at its usage
-
1
# Re-parses the service module dsl for the given branch and syncs the model.
# Pipeline (each step may short-circuit by returning a ParsingError):
#   1. mark branch unparsed
#   2. recompute component module refs
#   3. validate their namespaces
#   4. import assemblies from dsl files
#   5. persist the module-refs file back to the repo, reporting the new
#      commit sha through opts[:ret_dsl_updated_info] when the caller asked
#   6. mark branch parsed
# On success returns `parsed` augmented with :component_module_refs.
def update_model_from_dsl(module_branch,opts={})
  module_branch.set_dsl_parsed!(false)

  component_module_refs = update_component_module_refs(module_branch,opts)
  return component_module_refs if ParsingError.is_error?(component_module_refs)

  v_namespaces = validate_module_ref_namespaces(module_branch,component_module_refs)
  return v_namespaces if ParsingError.is_error?(v_namespaces)

  parsed, component_module_refs = update_assemblies_from_dsl(module_branch,component_module_refs,opts)
  # serialize_and_save_to_repo? returns a commit sha only when it actually
  # wrote an updated module-refs file
  if new_commit_sha = component_module_refs.serialize_and_save_to_repo?()
    if opts[:ret_dsl_updated_info]
      msg = "The module refs file was updated by the server"
      opts[:ret_dsl_updated_info] = ModuleDSLInfo::UpdatedInfo.new(:msg => msg, :commit_sha => new_commit_sha)
    end
  end
  return parsed if ParsingError.is_error?(parsed)

  module_branch.set_dsl_parsed!(true)

  # return component modules required by this service module
  parsed.merge!(:component_module_refs => component_module_refs.component_modules)
  parsed
end
-
-
1
private
-
1
# Delegates to ModuleRefs::Parse to recompute this branch's component module
# refs; the caller checks the result with ParsingError.is_error?.
def update_component_module_refs(module_branch,opts={})
  ModuleRefs::Parse.update_component_module_refs(ServiceModule,module_branch,opts)
end
-
-
# returns[ parsed,new_component_module_refs]
-
1
# Parses every assembly meta file in the branch and imports the assemblies
# into the model. Errors inside the per-file loop are collected by
# aggregate_errors, but several checks `return [error,ret_cmr]` immediately —
# the statement order and early returns here are load-bearing.
# returns [parsed,new_component_module_refs]
def update_assemblies_from_dsl(module_branch,component_module_refs,opts={})
  ret_cmr = component_module_refs
  project_idh = get_project.id_handle()
  module_name = module_name()
  module_branch_idh = module_branch.id_handle()

  # check if service instances are using assembly template before changes
  service_instances = get_assembly_instances()
  validate_service_instance_references(service_instances, module_branch) unless service_instances.empty?

  assembly_import_helper = AssemblyImport.new(project_idh,module_branch,self,component_module_refs)
  aggregate_errors = ParsingError::Aggregate.new(:error_cleanup => proc{error_cleanup()})
  assembly_meta_file_paths(module_branch) do |meta_file,default_assembly_name|
    aggregate_errors.aggregate_errors!() do
      file_content = RepoManager.get_file_content(meta_file,module_branch)
      format_type = meta_file_format_type(meta_file)
      opts.merge!(:file_path => meta_file,:default_assembly_name => default_assembly_name)

      hash_content = Aux.convert_to_hash(file_content,format_type,opts)||{}
      return [hash_content,ret_cmr] if ParsingError.is_error?(hash_content)

      # check if comp_name.dtk.assembly.yaml matches name in that file
      # only perform check for new service module structure
      unless self.class.is_legacy_service_module_structure?(module_branch)
        response = validate_name_for_assembly(meta_file,hash_content['name'])
        return [response,ret_cmr] if ParsingError.is_error?(response)
      end

      # check if assembly_wide_components exist and add them to assembly_wide node
      parse_assembly_wide_components!(hash_content)

      parsed = assembly_import_helper.process(module_name,hash_content,opts)
      return [parsed,ret_cmr] if ParsingError.is_error?(parsed)
    end
  end
  errors = aggregate_errors.raise_error?(:do_not_raise => true)
  return [errors,ret_cmr] if errors.is_a?(ParsingError)

  parsed = assembly_import_helper.import()

  if response = create_setting_objects_from_dsl(project_idh,module_branch)
    if ParsingError.is_error?(response)
      return [response,ret_cmr]
    end
  end

  if opts[:auto_update_module_refs]
    # TODO: should also update the contents of component module refs
    ret_cmr = ModuleRefs.get_component_module_refs(module_branch)
  end

  [parsed,ret_cmr]
end
-
-
# signature is assembly_meta_file_paths(module_branch) do |meta_file,default_assembly_name|
-
1
# Yields each assembly meta file path in the branch (json/yaml duplicate
# variants removed) together with the assembly name extracted from the path,
# or nil when the path does not match the layout's pattern.
# signature is assembly_meta_file_paths(module_branch) do |meta_file,default_assembly_name|
def assembly_meta_file_paths(module_branch, &block)
  meta_files, regexp = ServiceModule.meta_files_and_regexp?(module_branch)
  ret_with_removed_variants(meta_files).each do |path|
    match = regexp.match(path)
    block.call(path, match && match[1])
  end
end
-
-
1
# Guards against deleting assembly templates that are still referenced:
# collects the assembly names present in the branch's meta files, then for
# every service instance whose parent template name is NOT among them,
# records instance -> template; raises ErrorUsage naming the offenders.
def validate_service_instance_references(service_instances, module_branch)
  assembly_names = []
  assembly_names_with_templates = {}

  meta_files, regexp = ServiceModule.meta_files_and_regexp?(module_branch)
  assembly_file_paths = ret_with_removed_variants(meta_files)
  assembly_file_paths.each {|path| assembly_names << ServiceModule.meta_file_assembly_name(path)}

  service_instances.each do |instance|
    if parent = instance.copy_as_assembly_instance.get_parent
      parent_name = parent[:display_name]
      assembly_names_with_templates.merge!(instance[:display_name] => parent_name) unless assembly_names.include?(parent_name)
    end
  end

  unless assembly_names_with_templates.empty?
    instances = assembly_names_with_templates.keys
    templates = assembly_names_with_templates.values.uniq

    # singular/plural helpers for the error message
    is = (instances.size == 1) ? 'is' : 'are'
    it = (templates.size == 1) ? 'it' : 'them'

    raise ErrorUsage.new("Cannot delete assembly template(s) '#{templates.join(', ')}' because service instance(s) '#{instances.join(', ')}' #{is} referencing #{it}.")
  end
end
-
-
1
# If both a json and a yaml variant of the same meta file exist, keeps only
# the one matching the configured default format
# (dsl.service.format_type.default). Returns paths unchanged when no
# duplicate variants exist; raises ErrorUsage when duplicates exist and
# neither matches the default format.
def ret_with_removed_variants(paths)
  # if multiple files that match where one is json and one yaml, favor the default one
  two_variants_found = false
  common_paths = Hash.new
  paths.each do |path|
    if path =~ /(^.+)\.([^\.]+$)/
      all_but_type,type = $1,$2
      if common_paths[all_but_type]
        two_variants_found = true
      else
        common_paths[all_but_type] = Array.new
      end
      common_paths[all_but_type] << {:type => type, :path => path}
    else
      Log.error("Path (#{path}) has unexpected form; skipping 'removing variants analysis'")
    end
  end
  # shortcut
  return paths unless two_variants_found
  format_type_default = R8::Config[:dsl][:service][:format_type][:default]
  ret = Array.new
  common_paths.each_value do |variant_info|
    if variant_info.size == 1
      # fix: variant_info is an array of one-entry hashes; the previous
      # `variant_info[:path]` indexed the Array with a Symbol (TypeError)
      ret << variant_info.first[:path]
    else
      if match = variant_info.find{|vi|vi[:type] == format_type_default}
        ret << match[:path]
      else
        # fix: was `.amp` (NoMethodError) and "deleet" in the message
        choices = variant_info.map{|vi|vi[:path]}.join(', ')
        raise ErrorUsage.new("Cannot decide between the following meta files to use (#{choices}); delete all but desired one")
      end
    end
  end
  ret
end
-
-
1
# Checks that the assembly name declared inside the dsl file matches the name
# embedded in its file path. Returns ParsingError::BadAssemblyReference on
# mismatch, nil otherwise.
# NOTE(review): the guard `(name || file_path)` only returns early when BOTH
# are nil; if the intent was to skip whenever either is missing, `&&` was
# probably intended — confirm against callers before changing.
def validate_name_for_assembly(file_path,name)
  return unless (name || file_path)
  assembly_name = ServiceModule.meta_file_assembly_name(file_path) || 'UNKNOWN'
  unless assembly_name.eql?(name)
    ParsingError::BadAssemblyReference.new(:file_path => file_path, :name => name)
  end
end
-
-
1
# Verifies that every component module ref points at a namespace known to the
# model. Returns ParsingError::BadNamespaceReference for the first unknown
# namespace; callers only check the result with ParsingError.is_error?.
def validate_module_ref_namespaces(module_branch,component_module_refs)
  namespace_mh = module_branch.id_handle().createMH(:namespace)

  sp_hash = {
    :cols => [:id, :display_name]
  }
  known_namespaces = Model.get_objs(namespace_mh,sp_hash).map { |row| row[:display_name] }

  component_module_refs.component_modules.each do |_module_name, ref_info|
    referenced = ref_info[:namespace_info]
    unless known_namespaces.include?(referenced)
      return ParsingError::BadNamespaceReference.new(:name => referenced)
    end
  end
end
-
-
1
# Moves top-level 'assembly => components' entries into a synthetic
# 'assembly_wide' node under 'assembly => nodes', wrapping a single component
# in an array. No-op when there is no assembly section or no components.
def parse_assembly_wide_components!(hash_content)
  assembly = hash_content['assembly']
  return unless assembly && assembly['components']

  components = assembly['components']
  components = [components] unless components.is_a?(Array)

  nodes = (assembly['nodes'] ||= {})
  nodes.merge!('assembly_wide' => {'components' => components})
end
-
-
# TODO: ref DTK-1619: if we put this back in we need to handle case where cmps has an element with a title like
-
# cmp[title] or mod::cmp[title]; also would want to write or use a method in service/common that does not
-
# hard code '::' put instead takes a component ref and returns a module name
-
# def validate_component_names(hash_content,component_module_refs)
-
# module_refs_cmps = component_module_refs.component_modules.map{|k,v| k.to_s}
-
# nodes = hash_content['assembly']['nodes']||{}
-
# nodes.each do |n_name,n_value|
-
# cmps = n_value['components']
-
# cmps.each do |c|
-
# c_name = c.split('::').first
-
# return ParsingError::BadComponentReference.new(:component_name => c, :node_name => n_name) unless module_refs_cmps.include?(c_name)
-
# end
-
# end
-
-
# module_refs_cmps
-
# end
-
-
1
# Format type of a meta file, via Aux.format_type — presumably derived from
# the file extension (confirm in Aux).
def meta_file_format_type(path)
  Aux.format_type(path)
end
-
-
1
# Hook invoked by the parsing-error aggregator on failure; currently an
# intentional no-op (see TODOs below for the intended cleanup semantics).
def error_cleanup()
  # TODO: this is wrong;
  # ServiceModule.delete(id_handle())
  # determine if there is case where this is appropriate or have delete for other objects; can also case on dsl_parsed
  # TODO: may need to write error cleanup for service module update that does not parse for service module (#{update_object!(:display_name,:dsl_parsed).inspect})")
end
-
end
-
end
-
end
-
-
-
1
module DTK
-
1
class ServiceModule
-
1
# Serializes assemblies to the service module dsl. The object IS the
# db-hash-form content (subclasses Hash); a version-specific adapter class,
# loaded dynamically from the integer dsl version, implements serialize().
class AssemblyExport < Hash
  attr_reader :factory
  # Factory entry point: picks the version adapter class for integer_version
  # (defaulting from server config) and instantiates it.
  def self.create(factory,container_idh,service_module_branch,integer_version=nil)
    integer_version ||= DSLVersionInfo.default_integer_version()
    klass = load_and_return_version_adapter_class(integer_version)
    klass.new(factory,container_idh,service_module_branch,integer_version)
  end

  def initialize(factory,container_idh,service_module_branch,integer_version)
    super()
    @container_idh = container_idh
    @service_module_branch = service_module_branch
    @integer_version = integer_version
    @factory = factory
  end
  private :initialize

  # Writes this hash-form content into the model under @container_idh.
  def save_to_model()
    Model.input_hash_content_into_model(@container_idh,self,:preserve_input_hash=>true)
  end

  # Serializes the assembly and saves it to its meta file path in the repo
  # branch; returns the path regardless of whether the branch reported a save.
  def serialize_and_save_to_repo?()
    path = assembly_meta_filename_path()
    ordered_hash_serialized_content = serialize()
    @service_module_branch.serialize_and_save_to_repo?(path,ordered_hash_serialized_content)
    path
  end

  private
  include ServiceDSLCommonMixin

  # Dynamically loads (and memoizes) the version-specific AssemblyExport
  # adapter subclass for the given integer dsl version.
  def self.load_and_return_version_adapter_class(integer_version)
    return CachedAdapterClasses[integer_version] if CachedAdapterClasses[integer_version]
    adapter_name = "v#{integer_version.to_s}"
    opts = {
      :class_name => {:adapter_type => "AssemblyExport"},
      :subclass_adapter_name => true,
      :base_class => ServiceModule
    }
    CachedAdapterClasses[integer_version] = DynamicLoader.load_and_return_adapter_class("assembly_export",adapter_name,opts)
  end
  CachedAdapterClasses = Hash.new

  # Meta file path derived from the assembly's display name and the branch's
  # directory layout.
  def assembly_meta_filename_path()
    ServiceModule.assembly_meta_filename_path(assembly_hash()[:display_name],@service_module_branch)
  end

  # The single assembly component hash stored under self[:component].
  def assembly_hash()
    self[:component].values.first
  end

  # Version string (e.g. "1.0.0") for this export's integer version, or nil.
  def dsl_version?()
    ServiceModule::DSLVersionInfo.integer_version_to_version(@integer_version)
  end

  # Description to emit; falls back to the factory's display name.
  def assembly_description?()
    # @factory.assembly_instance.get_field?(:description)
    @factory[:description]||@factory[:display_name]
  end

  # Output form of a component: bare name, or {name => attribute overrides}
  # when overrides exist.
  def component_output_form(component_hash)
    name = component_name_output_form(component_hash[:display_name])
    if attr_overrides = component_hash[:attribute_override]
      {name => attr_overrides_output_form(attr_overrides)}
    else
      name
    end
  end
  # Converts internal '__' separators to the dsl module/component separator.
  def component_name_output_form(internal_format)
    internal_format.gsub(/__/,Seperators[:module_component])
  end

end
-
end
-
end
-
# converts serialized form into object form
-
2
module DTK; class ServiceModule
-
1
class AssemblyImport
-
1
r8_nested_require('assembly_import','port_ref')
-
1
r8_nested_require('assembly_import','port_mixin')
-
1
include PortMixin
-
1
extend FactoryObjectClassMixin
-
1
# Sets up accumulator state for a multi-file import: db-update hashes for
# components and nodes, plus per-assembly-ref indexes filled in by process().
def initialize(container_idh,module_branch,service_module,component_module_refs)
  @container_idh = container_idh
  @db_updates_assemblies = DBUpdateHash.new("component" => DBUpdateHash.new,"node" => DBUpdateHash.new)
  @ndx_ports = Hash.new
  @ndx_assembly_hashes = Hash.new #indexed by ref
  @module_branch = module_branch
  @module_name = service_module.module_name()
  @module_namespace = service_module.module_namespace()
  @service_module = service_module
  @component_module_refs = component_module_refs
  @ndx_version_proc_classes = Hash.new
  @ndx_assembly_file_paths = Hash.new
end
-
-
1
# Processes one parsed dsl file: selects the version adapter from the file's
# dsl version, iterates its assemblies, and accumulates component/node
# db-updates into instance state for a later import() call.
# Several checks `return` a ParsingError directly (bad node reference,
# workflow parse errors); others are collected via aggregate_errors.
def process(module_name,hash_content,opts={})
  integer_version = determine_integer_version(hash_content,opts)
  version_proc_class = load_and_return_version_adapter_class(integer_version)
  version_proc_class.assembly_iterate(@service_module,hash_content) do |assemblies_hash,node_bindings_hash|
    aggregate_errors = ParsingError::Aggregate.new()
    assemblies_hash.each do |ref,assem|
      if file_path = opts[:file_path]
        @ndx_assembly_file_paths[ref] = file_path
      end
      aggregate_errors.aggregate_errors! do
        db_updates_cmp = version_proc_class.import_assembly_top(ref,assem,@module_branch,@module_name,opts)
        @db_updates_assemblies["component"].merge!(db_updates_cmp)

        # parse_node_bindings_hash! with opts below
        # removes elements of node_bindings_hash that are not of form: {node => node_template}
        if db_updates_node_bindings = version_proc_class.parse_node_bindings_hash!(node_bindings_hash,:remove_non_legacy=> true)
          db_updates_cmp.values.first.merge!("node_bindings" => db_updates_node_bindings.mark_as_complete())
        end

        # if bad node reference, return error and continue with module import
        imported_nodes = version_proc_class.import_nodes(@container_idh,@module_branch,ref,assem,node_bindings_hash,@component_module_refs,opts)
        return imported_nodes if ParsingError.is_error?(imported_nodes)

        if workflow_hash = assem["workflow"]
          if parse_errors = Task::Template::ConfigComponents.find_parse_errors(workflow_hash)
            return parse_errors
          end
        end
        @db_updates_assemblies["node"].merge!(imported_nodes)
        @ndx_assembly_hashes[ref] ||= assem
        @ndx_version_proc_classes[ref] ||= version_proc_class
      end
    end
    aggregate_errors.raise_error?()
  end
end
-
-
1
# Commits the accumulated db-updates to the model: marks component/node
# updates complete (scoped so only this module's stale rows are deleted),
# writes them, then creates ports and port links. Returns the component
# db-update hash.
def import()
  module_branch_id = @module_branch[:id]
  mark_as_complete_cmp_constraint = {:module_branch_id=>module_branch_id} #so only delete extra components that belong to same module
  @db_updates_assemblies["component"].mark_as_complete(mark_as_complete_cmp_constraint)
  sp_hash = {
    :cols => [:id],
    :filter => [:eq,:module_branch_id, module_branch_id]
  }
  @existing_assembly_ids = Model.get_objs(@container_idh.createMH(:component),sp_hash).map{|r|r[:id]}
  mark_as_complete_node_constraint = {:assembly_id=>@existing_assembly_ids}
  @db_updates_assemblies["node"].mark_as_complete(mark_as_complete_node_constraint,:apply_recursively => true)

  Model.input_hash_content_into_model(@container_idh,@db_updates_assemblies)

  add_port_and_port_links()
  @db_updates_assemblies["component"]
end
-
-
1
# Builds the db-update hash for the top-level assembly component parsed from
# an assembly dsl file. Raises ParsingError when the file is empty or when no
# assembly name is available (explicit "name" key or
# opts[:default_assembly_name]).
def self.import_assembly_top(assembly_ref,assembly_hash,module_branch,module_name,opts={})
  raise ParsingError.new("Empty assembly dsl file",opts_file_path(opts)) if assembly_hash.empty?

  assembly_name = assembly_hash["name"] || opts[:default_assembly_name]
  unless assembly_name
    raise ParsingError.new("No name associated with assembly dsl file",opts_file_path(opts))
  end

  assembly_row = {
    "display_name" => assembly_name,
    "type" => "composite",
    "description" => assembly_hash['description'],
    "module_branch_id" => module_branch[:id],
    "version" => module_branch.get_field?(:version),
    "component_type" => Assembly.ret_component_type(module_name,assembly_name),
    "attribute" => import_assembly_attributes(assembly_hash["attributes"],opts)
  }
  {assembly_ref => assembly_row}
end
-
-
1
# Builds the db-update hash for an assembly's nodes. Resolves legacy node
# bindings to ruleset ids, validates the "nodes" section shape, and imports
# each node's attributes and component refs. Bad node-template references and
# component parse errors are returned (not raised) so module import can
# continue; other per-node errors are collected via aggregate_errors.
def self.import_nodes(container_idh,module_branch,assembly_ref,assembly_hash,node_bindings_hash,component_module_refs,opts={})
  # compute node_to_nb_rs and nb_rs_to_id
  node_to_nb_rs = node_to_node_binding_rs(assembly_ref,node_bindings_hash,opts)
  nb_rs_to_id = Hash.new
  unless node_to_nb_rs.empty?
    filter = [:oneof, :ref, node_to_nb_rs.values]
    nb_rs_containter = Library.get_public_library(container_idh.createMH(:library))
    nb_rs_to_id = nb_rs_containter.get_node_binding_rulesets(filter).inject(Hash.new) do |h,r|
      h.merge(r[:ref] => r[:id])
    end
  end

  aggregate_errors = ParsingError::Aggregate.new()
  unless nodes = assembly_hash["nodes"]
    return Hash.new
  end
  if nodes.kind_of?(Hash)
    # no op
  elsif nodes.kind_of?(String) # corner case: single node with no attributes
    nodes = {nodes => {}}
  else
    raise ParsingError.new("Nodes section is ill-formed",opts_file_path(opts))
  end
  ret = nodes.inject(Hash.new) do |h,(node_hash_ref,node_hash)|
    node_hash ||= Hash.new
    aggregate_errors.aggregate_errors!(h) do
      node_ref = assembly_template_node_ref(assembly_ref,node_hash_ref)
      unless (node_hash||{}).kind_of?(Hash)
        raise ParsingError.new("The content associated with key (#{node_hash_ref}) should be a hash representing assembly node info",opts_file_path(opts))
      end
      type,attributes = import_type_and_node_attributes(node_hash,opts)
      # the synthetic assembly_wide node always gets type 'assembly_wide'
      type = node_hash_ref.eql?('assembly_wide') ? 'assembly_wide' : type
      node_output = {
        "display_name" => node_hash_ref,
        "type" => type,
        "attribute" => attributes,
        "*assembly_id" => "/component/#{assembly_ref}"
      }

      if nb_rs = node_to_nb_rs[node_hash_ref]
        if nb_rs_id = nb_rs_to_id[nb_rs]
          node_output["node_binding_rs_id"] = nb_rs_id
        else
          # TODO: extend aggregate_errors.aggregate_errors to handle this
          # We want to import module still even if there are bad node references
          # we stop importing nodes when run into bad node reference but still continue with module import
          return ParsingError::BadNodeReference.new(:node_template => nb_rs,:assembly => assembly_hash["name"])
        end
      else
        node_output["node_binding_rs_id"] = nil
      end

      cmps_output = import_component_refs(container_idh,assembly_hash["name"],node_hash["components"],component_module_refs,opts)
      return cmps_output if ParsingError.is_error?(cmps_output)

      unless cmps_output.empty?
        node_output["component_ref"] = cmps_output
      end
      h.merge(node_ref => node_output)
    end
  end

  aggregate_errors.raise_error?()
  ret
end
-
-
1
# Memoized fetch of the service module's augmented assembly nodes.
def augmented_assembly_nodes()
  @augmented_assembly_nodes ||= @service_module.get_augmented_assembly_nodes()
end
-
-
1
# Destructively annotates each port hash with :parsed_port_name (derived from
# its display name) unless already set; returns the ports collection.
def self.augment_with_parsed_port_names!(ports)
  ports.each do |port|
    next if port[:parsed_port_name]
    port[:parsed_port_name] = Port.parse_port_display_name(port[:display_name])
  end
end
-
-
1
private
-
1
# Determines the integer dsl version of a parsed file: an explicit
# "dsl_version" wins; otherwise the legacy top-level key implies the version
# ("assemblies" => 1, "assembly" => 2); else the configured default.
def determine_integer_version(hash_content,opts={})
  if version = hash_content["dsl_version"]
    ServiceModule::DSLVersionInfo.version_to_integer_version(version,opts)
  elsif hash_content["assemblies"]
    1
  elsif hash_content["assembly"]
    2
  else
    ServiceModule::DSLVersionInfo.default_integer_version()
  end
end
-
-
1
# Instance-side convenience for the class-level adapter loader below.
def load_and_return_version_adapter_class(integer_version)
  self.class.load_and_return_version_adapter_class(integer_version)
end
# Dynamically loads (and memoizes in CachedAdapterClasses) the
# version-specific AssemblyImport adapter subclass for the given integer dsl
# version.
def self.load_and_return_version_adapter_class(integer_version)
  return CachedAdapterClasses[integer_version] if CachedAdapterClasses[integer_version]
  adapter_name = "v#{integer_version.to_s}"
  opts = {
    :class_name => {:adapter_type => "AssemblyImport"},
    :subclass_adapter_name => true,
    :base_class => ServiceModule
  }
  CachedAdapterClasses[integer_version] = DynamicLoader.load_and_return_adapter_class("assembly_import",adapter_name,opts)
end
CachedAdapterClasses = Hash.new
-
-
1
# Base-version hook: node bindings are only meaningful for legacy dsl
# versions, so the default implementation extracts nothing and returns nil.
# Version adapter subclasses override this.
def self.parse_node_bindings_hash!(node_bindings_hash,opts={})
  # nothing to parse at this dsl version
  nil
end
-
-
1
# Parses a node's "components" section into component-ref rows indexed by
# ref. Handles single-item or array input, records title'd components for
# later title-attribute overrides, resolves attribute template ids, and
# converts any ParsingError raised mid-parse into a returned ParsingError
# carrying the file path.
def self.import_component_refs(container_idh,assembly_name,components_hash,component_module_refs,opts={})
  ret = Hash.new
  unless components_hash
    return ret
  end
  cmps_with_titles = Array.new
  components_hash = [components_hash] unless components_hash.kind_of?(Array)
  ret = components_hash.inject(Hash.new) do |h,cmp_input|
    parse = cmp_ref = nil
    begin
      parse = component_ref_parse(cmp_input)
      cmp_ref = Aux::hash_subset(parse,[:component_type,:version,:display_name])
      if cmp_ref[:version]
        cmp_ref[:has_override_version] = true
      end
      if cmp_title = parse[:component_title]
        cmps_with_titles << {:cmp_ref => cmp_ref, :cmp_title => cmp_title}
      end

      import_component_attribute_info(cmp_ref,cmp_input)

    rescue ParsingError => e
      # re-wrap so the error carries the dsl file path
      return ParsingError.new(e.to_s,opts_file_path(opts))
    end
    h.merge(parse[:ref] => cmp_ref)
  end

  opts_set_matching = {:donot_set_component_templates=>true,:set_namespace=>true}
  component_module_refs.set_matching_component_template_info?(ret.values,opts_set_matching)
  set_attribute_template_ids!(ret,container_idh)
  add_title_attribute_overrides!(cmps_with_titles,container_idh)
  ret
end
-
-
1
# Copies each attribute override from the raw component input into cmp_ref's
# :attribute_override hash. NOTE(review): ret_attribute_overrides is defined
# outside this file (presumably in the version adapter) — confirm contract.
def self.import_component_attribute_info(cmp_ref,cmp_input)
  ret_attribute_overrides(cmp_input).each_pair do |attr_name,attr_val|
    attr_overrides = import_attribute_overrides(attr_name,attr_val)
    update_component_attribute_info(cmp_ref,attr_overrides)
  end
end
-
1
# Returns cmp_ref's :attribute_override hash, lazily initializing it to an
# empty hash when absent (nil or false, matching ||= semantics).
def self.output_component_attribute_info(cmp_ref)
  existing = cmp_ref[:attribute_override]
  return existing if existing
  cmp_ref[:attribute_override] = Hash.new
end
-
1
# Merges the given attribute-override entries into cmp_ref's (lazily
# created) :attribute_override hash.
def self.update_component_attribute_info(cmp_ref,hash)
  overrides = output_component_attribute_info(cmp_ref)
  overrides.merge!(hash)
end
-
-
# These are attributes at the assembly level, as opposed to being at the component or node level
-
1
# Assembly-level attributes (as opposed to component or node level):
# validates the hash shape, then converts to attribute db rows.
def self.import_assembly_attributes(assembly_attrs_hash,opts={})
  assembly_attrs_hash ||= Hash.new
  unless assembly_attrs_hash.kind_of?(Hash)
    raise ParsingError.new("Assembly attribute(s) are ill-formed",opts_file_path(opts))
  end
  import_attributes_helper(assembly_attrs_hash)
end
-
-
# These are attributes at the node level
-
# returns [type,attributes]
-
1
# Defaults to a plain Node type; the presence of a "type" entry among the
# node's attributes switches the node to NodeGroup (and that entry is removed
# from the attribute set).
# returns [type,attributes]
def self.import_type_and_node_attributes(node_hash,opts={})
  type = Node::Type::Node.stub
  attributes = import_node_attributes(node_hash["attributes"],opts)
  if attr_type = attributes["type"]
    attributes.delete("type")
    type = Node::Type::NodeGroup.stub
  end
  [type,attributes]
end
-
1
# Node-level analogue of import_assembly_attributes: validates shape, then
# converts to attribute db rows.
def self.import_node_attributes(node_attrs_hash,opts={})
  node_attrs_hash ||= Hash.new
  unless node_attrs_hash.kind_of?(Hash)
    raise ParsingError.new("Node attribute(s) are ill-formed",opts_file_path(opts))
  end
  # TODO: make sure that each node attribute is legal
  import_attributes_helper(node_attrs_hash)
end
-
1
# Converts a {name => value} attribute hash into a DBUpdateHash of attribute
# rows (display_name, value_asserted, inferred data_type) and marks it
# complete. Shared by the assembly-level and node-level attribute import.
def self.import_attributes_helper(attr_val_hash)
  ret = DBUpdateHash.new()
  attr_val_hash.each_pair do |attr_name,attr_val|
    # the attribute name doubles as the row ref
    # (removed dead, misspelled local: `ref = dispaly_name = attr_name`)
    ret[attr_name] = {
      "display_name" => attr_name,
      "value_asserted" => attr_val,
      "data_type" => Attribute::Datatype.datatype_from_ruby_object(attr_val)
    }
  end
  ret.mark_as_complete()
end
-
-
1
# Builds a single-entry override map {attr_name => info hash}; the info hash
# carries :cannot_change => true when opts requests an immutable override.
def self.import_attribute_overrides(attr_name,attr_val,opts={})
  attr_info = {
    :display_name => attr_name,
    :attribute_value => attr_val
  }
  attr_info[:cannot_change] = true if opts[:cannot_change]
  {attr_name => attr_info}
end
-
-
-
1
# Resolves each attribute override's :attribute_template_id by querying the
# attribute model with a disjunctive filter over (component template id,
# attribute name) pairs. Raises ParsingError listing any overrides that did
# not match a template attribute. Mutates cmp_refs in place and returns it.
def self.set_attribute_template_ids!(cmp_refs,container_idh)
  ret = cmp_refs
  filter_disjuncts = Array.new
  ndx_attrs = Hash.new
  cmp_refs.each_value do |cmp_ref_info|
    if attrs = cmp_ref_info[:attribute_override]
      cmp_template_id = cmp_ref_info[:component_template_id]
      ndx_attrs[cmp_template_id] = {:attrs => attrs,:cmp_ref => cmp_ref_info}
      disjunct = [:and, [:eq, :component_component_id, cmp_template_id],
                  [:oneof, :display_name, attrs.keys]]
      filter_disjuncts << disjunct
    end
  end
  # nothing to resolve when no component had overrides
  return ret if filter_disjuncts.empty?

  filter = (filter_disjuncts.size == 1 ? filter_disjuncts.first : ([:or] + filter_disjuncts))
  sp_hash = {
    :cols => [:id,:display_name,:component_component_id],
    :filter => filter
  }
  Model.get_objs(container_idh.createMH(:attribute),sp_hash).each do |r|
    cmp_template_id = r[:component_component_id]
    # relies on cmp_ref_info[:attribute_override] keys matching display_name
    if match = ndx_attrs[cmp_template_id][:attrs][r[:display_name]]
      match.merge!(:attribute_template_id => r[:id])
    end
  end

  # now check attributes not matched;
  bad_attrs = Array.new
  ndx_attrs.each_value do |r|
    r[:attrs].each_pair do |ref,info|
      unless info[:attribute_template_id]
        bad_attrs << info.merge(:component_display_name => r[:cmp_ref][:display_name])
      end
    end
  end
  unless bad_attrs.empty?
    # TODO: include namespace info
    bad_attrs_list = bad_attrs.map do |attr_info|
      cmp_name = Component.display_name_print_form(attr_info[:component_display_name])
      "#{cmp_name}/#{attr_info[:display_name]}"
    end
    attribute = (bad_attrs.size == 1 ? "attribute" : "attributes")
    raise ParsingError.new("Bad #{attribute} (#{bad_attrs_list.join(', ')}) in assembly template")
  end
  ret
end
-
-
# cmps_with_titles is an array of hashes with keys :cmp_ref, :cmp_title
-
1
# For each component referenced with a title (cmp[title]), looks up the
# component template's title attribute and adds an immutable override
# setting it to the given title. Raises ParsingError when a referenced
# template has no title attribute.
# cmps_with_titles is an array of hashes with keys :cmp_ref, :cmp_title
def self.add_title_attribute_overrides!(cmps_with_titles,container_idh)
  return if cmps_with_titles.empty?
  cmp_mh = container_idh.createMH(:component)
  cmp_idhs = cmps_with_titles.map{|r|cmp_mh.createIDH(:id => r[:cmp_ref][:component_template_id])}
  ndx_title_attributes = Component::Template.get_title_attributes(cmp_idhs).inject(Hash.new) do |h,a|
    h.merge(a[:component_component_id] => a)
  end
  bad_attrs = Array.new
  cmps_with_titles.each do |r|
    cmp_ref = r[:cmp_ref]
    if title_attribute = ndx_title_attributes[cmp_ref[:component_template_id]]
      pntr = cmp_ref[:attribute_override] ||= Hash.new
      pntr.merge!(import_attribute_overrides(title_attribute[:display_name],r[:cmp_title],:cannot_change => true))
    else
      cmp_name = Component.display_name_print_form(cmp_ref[:display_name])
      err_msg = "Component referenced by #{cmp_name}"
      if ns = cmp_ref[:namespace]
        err_msg << "(namespace: #{ns})"
      end
      err_msg << " is missing the title field attribute (usally 'name') or is configured to be a singleton; correct by editing this component module or removing title reference in assembly template)"
      raise ParsingError.new(err_msg)
    end
  end
end
-
-
1
# Normalizes opts into an Opts object restricted to :file_path; used when
# constructing ParsingError instances that should reference the dsl file.
def self.opts_file_path(opts)
  (opts.kind_of?(Opts) ? opts :Opts.new(opts)).slice(:file_path)
end
-
end
-
end; end
-
-
2
module DTK; class ServiceModule
-
1
class AssemblyImport
-
1
# Port handling for AssemblyImport: creates needed ports per assembly and
# imports their port links after the core model import.
module PortMixin
  # All ports created/collected so far, indexed internally by port id.
  def ports()
    @ndx_ports.values()
  end
  private
  # Creates ports for each processed assembly and imports its port links.
  # Returns a ParsingError immediately when a version adapter reports one.
  def add_port_and_port_links()
    # port links can only be imported in after ports created
    # add ports to assembly nodes
    db_updates_port_links = Hash.new
    @ndx_assembly_hashes.each do |assembly_ref,assembly|
      assembly_idh = @container_idh.get_child_id_handle(:component,assembly_ref)
      ports = add_needed_ports(assembly_idh)
      version_proc_class = @ndx_version_proc_classes[assembly_ref]
      opts = Hash.new
      if file_path = @ndx_assembly_file_paths[assembly_ref]
        opts[:file_path] = file_path
      end
      port_links = version_proc_class.import_port_links(assembly_idh,assembly_ref,assembly,ports,opts)
      return port_links if ParsingError.is_error?(port_links)

      db_updates_port_links.merge!(port_links)
      ports.each{|p|@ndx_ports[p[:id]] = p}
    end
    # Within import_port_links does the mark as complete for port links
    Model.input_hash_content_into_model(@container_idh,{"component" => db_updates_port_links})
  end
end
-
-
1
# Creates any ports the assembly template needs (based on its link defs) and
# returns the resulting port rows from PortProcessing.
def add_needed_ports(assembly_idh)
  assembly = assembly_idh.create_object()
  link_defs_info = LinkDef::Info.get_link_def_info(assembly)

  # (removed unused local `ret = Array.new`; the return value comes from
  # PortProcessing.create_assembly_template_ports?)
  create_opts = {:returning_sql_cols => [:link_def_id,:id,:display_name,:type,:connected]}
  PortProcessing.create_assembly_template_ports?(link_defs_info,create_opts)
end
-
end
-
-
1
module PortProcessing
-
1
def self.create_assembly_template_ports?(link_defs_info,opts={})
-
ret = Array.new
-
return ret if link_defs_info.empty?
-
port_mh = link_defs_info.first.model_handle(:port)
-
ndx_existing_ports = get_ndx_existing_ports(port_mh,link_defs_info,opts)
-
# create create-hashes for both local side and remote side ports
-
# Need to index by node because create_from_rows can only insert under one parent
-
ndx_rows = Hash.new
-
link_defs_info.each do |ld_info|
-
if link_def = ld_info[:link_def]
-
node = ld_info[:node]
-
cmp_ref = ld_info[:component_ref]
-
port = Port.ret_port_create_hash(link_def,node,ld_info[:nested_component],:component_ref => cmp_ref)
-
if existing_port_info = (ndx_existing_ports[node[:id]]||{})[port[:ref]]
-
existing_port_info[:matched] = true
-
ret << existing_port_info[:port]
-
else
-
pntr = ndx_rows[node[:id]] ||= {:node => node, :ndx_create_rows => Hash.new}
-
pntr[:ndx_create_rows][port[:ref]] ||= port
-
end
-
end
-
end
-
-
# add the remote ports
-
link_defs_info.generate_link_def_link_pairs do |link_def,link|
-
remote_component_type = link[:remote_component_type]
-
link_defs_info.select{|r|r[:nested_component][:component_type] == remote_component_type}.each do |matching_node_cmp|
-
node = matching_node_cmp[:node]
-
component = matching_node_cmp[:nested_component]
-
cmp_ref = matching_node_cmp[:component_ref]
-
port = Port.ret_port_create_hash(link_def,node,component,:remote_side=>true,:component_ref => cmp_ref)
-
if existing_port_info = (ndx_existing_ports[node[:id]]||{})[port[:ref]]
-
existing_port_info[:matched] = true
-
ret << existing_port_info[:port]
-
else
-
pntr = ndx_rows[node[:id]] ||= {:node => node, :ndx_create_rows => Hash.new}
-
pntr[:ndx_create_rows][port[:ref]] ||= port
-
end
-
end
-
end
-
-
new_rows = Array.new
-
ndx_rows.values.each do |r|
-
create_port_mh = r[:node].model_handle_with_auth_info.create_childMH(:port)
-
new_rows += Model.create_from_rows(create_port_mh,r[:ndx_create_rows].values,opts)
-
end
-
-
# delete any existing ports that match what is being put in now
-
port_idhs_to_delete = Array.new
-
ndx_existing_ports.each_value do |inner_ndx_ports|
-
inner_ndx_ports.each_value do |port_info|
-
unless port_info[:matched]
-
port_idhs_to_delete << port_info[:port].id_handle()
-
end
-
end
-
end
-
unless port_idhs_to_delete.empty?()
-
Model.delete_instances(port_idhs_to_delete)
-
end
-
-
# for new rows need to splice in node info
-
unless new_rows.empty?
-
sp_hash = {
-
:cols => [:id,:node],
-
:filter => [:oneof, :node_node_id, new_rows.map{|p|p[:parent_id]}]
-
}
-
ndx_port_node = Model.get_objs(port_mh,sp_hash).inject(Hash.new) do |h,r|
-
h.merge(r[:id] => r[:node])
-
end
-
new_rows.each{|r|r.merge!(:node => ndx_port_node[r[:id]])}
-
end
-
ret + new_rows
-
end
-
-
1
private
-
# returns hash where each key value has form
-
# PortID:
-
# port: PORT
-
# matched: false
-
1
def self.get_ndx_existing_ports(port_mh,link_defs_info,opts={})
-
ndx_existing_ports = Hash.new
-
nodes = link_defs_info.map{|ld|ld[:node]}
-
return ndx_existing_ports if nodes.empty?
-
-
# make sure duplicate ports are pruned; tried to use :duplicate_refs => :prune_duplicates but bug; so explicitly looking for existing ports
-
sp_hash = {
-
:cols => ([:node_node_id,:ref,:node] + (opts[:returning_sql_cols]||[])).uniq,
-
:filter => [:oneof, :node_node_id, nodes.map{|n|n[:id]}]
-
}
-
-
Model.get_objs(port_mh,sp_hash,:keep_ref_cols => true).each do |r|
-
(ndx_existing_ports[r[:node_node_id]] ||= Hash.new)[r[:ref]] = {:port => r,:matched => false}
-
end
-
ndx_existing_ports
-
end
-
end
-
end;end
-
2
module DTK; class ServiceModule
-
1
class AssemblyImport
-
1
class PortRef < SimpleHashObject
-
1
include ServiceDSLCommonMixin
-
-
1
def self.parse(port_ref,assembly_id_or_opts={})
-
assembly_id = nil
-
err_opts = Opts.new
-
if assembly_id_or_opts.kind_of?(Hash)
-
assembly_id = assembly_id_or_opts[:assembly_id]
-
err_opts.merge!(assembly_id_or_opts)
-
else
-
assembly_id = assembly_id_or_opts
-
end
-
-
# TODO: may need to update this to handle port refs with titles
-
if port_ref =~ PortRefRegex
-
node = $1; cmp_name = $2; link_def_ref = $3
-
hash = {:node => node,:component_type => component_type_internal_form(cmp_name),:link_def_ref => link_def_ref}
-
if assembly_id
-
hash.merge!(:assembly_id => assembly_id)
-
end
-
new(hash)
-
else
-
raise ParsingError.new("Ill-formed port ref (#{port_ref})",err_opts)
-
end
-
end
-
1
def self.parse_component_link(input_node,input_cmp_name,component_link_hash,opts={})
-
err_opts = Opts.new(opts).slice(:file_path)
-
unless component_link_hash.size == 1
-
raise ParsingError.new("Ill-formed component link ?1",component_link_hash,err_opts)
-
end
-
link_def_ref = component_link_hash.keys.first
-
-
cmp_link_value = component_link_hash.values.first
-
cmp_link_value = "assembly_wide#{Seperators[:node_component]}#{cmp_link_value}" unless cmp_link_value.include?(Seperators[:node_component])
-
-
if cmp_link_value =~ ServiceLinkTarget
-
output_node = $1; output_cmp_name = $2
-
input = parsed_endpoint(input_node,input_cmp_name,link_def_ref)
-
output = parsed_endpoint(output_node,output_cmp_name,link_def_ref)
-
{:input => input, :output => output}
-
else
-
raise ParsingError.new("Ill-formed component link ?file_path ?1\nIt should have form: \n ?2",component_link_hash,ServiceLinkLegalForm,err_opts)
-
end
-
end
-
1
PortRefRegex = Regexp.new("(^.+)#{Seperators[:node_component]}(.+)#{Seperators[:component_link_def_ref]}(.+$)")
-
1
ServiceLinkTarget = Regexp.new("(^.+)#{Seperators[:node_component]}(.+$)")
-
1
ServiceLinkLegalForm = "LinkType: Node/Component"
-
-
# ports are augmented with field :parsed_port_name
-
1
def matching_id(aug_ports,opts={})
-
if port_or_error = matching_port(aug_ports,opts)
-
port_or_error.kind_of?(ParsingError) ? port_or_error : port_or_error[:id]
-
end
-
end
-
-
# ports are augmented with field :parsed_port_name
-
1
def matching_port(aug_ports,opts={})
-
aug_ports.find{|port|matching_port__match?(port)} || matching_port__error(opts)
-
end
-
-
1
private
-
1
def self.parsed_endpoint(node,cmp_name,link_def_ref)
-
component_type,title = ComponentTitle.parse_component_display_name(cmp_name)
-
ret_hash = {:node => node,:component_type => component_type_internal_form(component_type), :link_def_ref => link_def_ref}
-
ret_hash.merge!(:title => title) if title
-
new(ret_hash)
-
end
-
1
def self.component_type_internal_form(cmp_type_ext_form)
-
# TODO: this does not take into account that there could be a version on cmp_type_ext_form
-
InternalForm.component_ref(cmp_type_ext_form)
-
end
-
-
1
def matching_port__error(opts={})
-
if opts[:do_not_throw_error]
-
opts_err = Opts.new(opts).slice(:file_path)
-
ParsingError::BadComponentLink.new(self[:link_def_ref],opts[:base_cmp_name],opts_err)
-
else
-
Error.new("Cannot find match to (#{self.inspect})")
-
end
-
end
-
-
1
def matching_port__match?(aug_port)
-
p = aug_port[:parsed_port_name]
-
node = aug_port[:node][:display_name]
-
-
matching_port__match_on_assembly_id?(aug_port) and
-
self[:node] == node and
-
self[:component_type] == p[:component_type] and
-
self[:link_def_ref] == p[:link_def_ref] and
-
self[:title] == p[:title]
-
end
-
-
1
def matching_port__match_on_assembly_id?(aug_port)
-
self[:assembly_id].nil? or (self[:assembly_id] == aug_port[:assembly_id])
-
end
-
-
1
def raise_or_ret_error(err_class,args,opts={})
-
opts_file_path = Aux::hash_subset(opts,[:file_path])
-
err = err_class.new(*args,opts_file_path)
-
opts[:do_not_throw_error] ? err : raise(err)
-
end
-
-
1
class AddOn < self
-
# returns assembly ref, port_ref
-
1
def self.parse(add_on_port_ref,assembly_list)
-
assembly_name,port_ref = (add_on_port_ref =~ AOPortRefRegex; [$1,$2])
-
unless assembly_match = assembly_list.find{|a|a[:display_name] == assembly_name}
-
assembly_names = assembly_list.map{|a|a[:display_name]}
-
Log.error("Assembly name in add-on port link (#{assembly_name}) is illegal; must be one of (#{assembly_names.join(',')})")
-
# raise ErrorUsage.new("Assembly name in add-on port link (#{assembly_name}) is illegal; must be one of (#{assembly_names.join(',')})")
-
end
-
[assembly_name,super(port_ref,assembly_match[:id])]
-
end
-
1
AOSep = Seperators[:assembly_node]
-
1
AOPortRefRegex = Regexp.new("(^[^#{AOSep}]+)#{AOSep}(.+$)")
-
end
-
end
-
end
-
end; end
-
1
module DTK
-
1
module ServiceDSLCommonMixin
-
1
Seperators = {
-
:module_component => "::", #TODO: if this changes need to change ModCompGsub
-
:component_version => ":",
-
:component_port => "/",
-
:assembly_node => "/",
-
:node_component => "/",
-
:component_link_def_ref => "/",
-
:title_before => '[',
-
:title_after => ']',
-
}
-
1
ModCompInternalSep = "__" #TODO: if this changes need to chage ModCompGsub[:sub]
-
1
ModCompGsub = {
-
:pattern => /(^[^:]+)::/,
-
:sub => '\1__'
-
}
-
1
CmpVersionRegexp = Regexp.new("(^.+)#{Seperators[:component_version]}([0-9]+.+$)")
-
-
# pattern that appears in dsl that designates a component title
-
1
DSLComponentTitleRegex = /(^.+)\[(.+)\]/
-
-
1
module InternalForm
-
1
def self.component_ref(cmp_type_ext_form)
-
cmp_type_ext_form.gsub(ModCompGsub[:pattern],ModCompGsub[:sub])
-
end
-
-
# returns hash with keys
-
# component_type,
-
# version (optional)
-
# title (optional)
-
1
def self.component_ref_info(cmp_type_ext_form)
-
ref = component_ref(cmp_type_ext_form)
-
if ref =~ CmpVersionRegexp
-
type = $1; version = $2
-
else
-
type = ref; version = nil
-
end
-
if type =~ DSLComponentTitleRegex
-
type = $1
-
title = $2
-
ref = ComponentTitle.ref_with_title(type,title)
-
display_name = ComponentTitle.display_name_with_title(type,title)
-
end
-
ret = {:component_type => type}
-
ret.merge!(:version => version) if version
-
ret.merge!(:title => title) if title
-
ret
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ServiceModule
-
1
class DSLParser < DTK::ModuleDSLParser
-
1
def self.module_type()
-
:service_module
-
end
-
1
def self.module_class
-
ServiceModule
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ServiceModule
-
1
class ParsingError < ErrorUsage::Parsing
-
1
r8_nested_require('parsing_error','aggregate')
-
1
r8_nested_require('parsing_error','dangling_component_refs')
-
1
r8_nested_require('parsing_error','removed_service_instance_cmp_ref')
-
1
r8_nested_require('parsing_error','bad_component_link')
-
-
# These can be ovewritten; default is simple behavior that ignores new errors (reports first one)
-
1
def add_with(aggregate_error=nil)
-
aggregate_error || self
-
end
-
1
def add_error_opts(error_opts=Opts.new)
-
self
-
end
-
-
1
class BadNodeReference < self
-
1
def initialize(params={})
-
err_msg = "Bad node template (?node_template) in assembly '?assembly'"
-
err_params = Params.new(:node_template => params[:node_template],:assembly => params[:assembly])
-
super(err_msg,err_params)
-
end
-
end
-
-
1
class BadAssemblyReference < self
-
1
def initialize(params={})
-
err_msg = "Assembly name (?name) does not match assembly name in file path '?file_path'"
-
err_params = Params.new(:file_path => params[:file_path],:name => params[:name])
-
super(err_msg,err_params)
-
end
-
end
-
-
1
class BadNamespaceReference < self
-
1
def initialize(params={})
-
err_msg = "Namespace (?name) referenced in module_refs file does not exist in local environment"
-
err_params = Params.new(:name => params[:name])
-
super(err_msg,err_params)
-
end
-
end
-
-
1
class BadComponentReference < self
-
1
def initialize(params={})
-
err_msg = "The following component template (?component_name) required by node (?node_name) is not referenced in #{ModuleRefs.meta_filename_path()} with its component module and namespace which it belongs to"
-
err_params = Params.new(:component_name => params[:component_name], :node_name => params[:node_name])
-
super(err_msg,err_params)
-
end
-
end
-
-
1
class AmbiguousModuleRef < self
-
1
def initialize(params={})
-
err_msg = "Reference to ?module_type module (?module_name) is ambiguous; it belongs to the namespaces (?namespaces); one of these namespaces should be selected by editing the module_refs file"
-
-
err_params = Params.new(
-
:module_type => params[:module_type],
-
:module_name => params[:module_name],
-
:namespaces => params[:namespaces].join(',')
-
)
-
super(err_msg,err_params)
-
end
-
end
-
end
-
end
-
end
-
2
module DTK; class ServiceModule
-
1
class ParsingError
-
1
class Aggregate
-
1
def initialize(opts={})
-
@aggregate_error = nil
-
@error_cleanup = opts[:error_cleanup]
-
end
-
-
1
def aggregate_errors!(ret_when_err=nil,&block)
-
begin
-
yield
-
rescue DanglingComponentRefs => e
-
@aggregate_error = e.add_with(@aggregate_error)
-
ret_when_err
-
rescue AmbiguousModuleRef => e
-
@aggregate_error = e.add_with(@aggregate_error)
-
ret_when_err
-
rescue Exception => e
-
@error_cleanup.call() if @error_cleanup
-
raise e
-
end
-
end
-
-
1
def raise_error?(opts={})
-
if @aggregate_error
-
@error_cleanup.call() if @error_cleanup
-
error = @aggregate_error.add_error_opts(Opts.new(:log_error => false))
-
opts[:do_not_raise] ? error : raise(error)
-
end
-
end
-
end
-
end
-
end; end
-
1
module DTK
-
1
class ServiceModule
-
1
class ParsingError
-
1
class BadComponentLink < self
-
1
def initialize(link_def_ref,base_cmp_name,opts=Opts.new)
-
err_params = Params.new(:link_def_ref => link_def_ref, :base_cmp_name => base_cmp_name)
-
err_msg = "Component ?base_cmp_name's component link (?link_def_ref) refers to a component instance that does not exist"
-
super(err_msg,err_params,opts)
-
end
-
end
-
end
-
end
-
end
-
2
module DTK; class ServiceModule
-
1
class ParsingError
-
1
class DanglingComponentRefs < self
-
1
attr_reader :cmp_ref_info_list
-
1
def initialize(cmp_ref_info_list,opts={})
-
super(err_msg(cmp_ref_info_list),opts)
-
# each element can be a component ref object or a hash
-
@cmp_ref_info_list = cmp_ref_info_list
-
end
-
-
1
def add_error_opts(error_opts={})
-
error_opts.empty? ? self : self.class.new(@cmp_ref_info_list,error_opts)
-
end
-
-
#
-
# Returns list of missing modules with version
-
#
-
1
def missing_module_list()
-
# forming hash and then getting its vals to remove dups in same <module,version,namepsace>
-
module_hash = @cmp_ref_info_list.inject(Hash.new) do |h,r|
-
module_name = r[:component_type].split('__').first
-
remote_namespace = r[:remote_namespace]
-
ndx = "#{module_name}---#{r[:version]}---#{remote_namespace}"
-
info = {
-
:name => module_name,
-
:version => r[:version]
-
}
-
info.merge!(:remote_namespace => remote_namespace) if remote_namespace
-
h.merge!(ndx => info)
-
end
-
-
module_hash.values
-
end
-
-
# aggregate_error can be nil, a anglingComponentRefs error or other error
-
1
def add_with(aggregate_error=nil)
-
if aggregate_error.nil?
-
self
-
elsif aggregate_error.kind_of?(DanglingComponentRefs)
-
self.class.new(ret_unique_union(@cmp_ref_info_list,aggregate_error.cmp_ref_info_list))
-
else
-
super
-
end
-
end
-
-
1
private
-
1
def ret_unique_union(cmp_refs1,cmp_refs2)
-
ndx_ret = cmp_refs1.inject(Hash.new){|h,r|h.merge(ret_unique_union__ndx(r) => r)}
-
cmp_refs2.inject(ndx_ret){|h,r|h.merge(ret_unique_union__ndx(r) => r)}.values
-
end
-
-
1
def ret_unique_union__ndx(cmp_ref_info)
-
ret = cmp_ref_info[:component_type]
-
if version = cmp_ref_info[:version]
-
ret = "#{ret}(#{version})"
-
end
-
ret
-
end
-
-
1
def err_msg(cmp_ref_info_list)
-
what = (cmp_ref_info_list.size==1 ? "component template" : "component templates")
-
refs = cmp_ref_info_list.map{|cmp_ref_info|print_form(cmp_ref_info)}.compact.join(",")
-
is = (cmp_ref_info_list.size==1 ? "is" : "are")
-
does = (cmp_ref_info_list.size==1 ? "does" : "do")
-
"The following #{what} (#{refs}) that #{is} referenced by assemblies in the service module #{does} not exist; this can be rectified by invoking the 'push' command after manually loading appropriate component module(s) or by removing references in the service DSL file(s)"
-
end
-
-
1
def print_form(cmp_ref_info)
-
ret = ComponentRef.print_form(cmp_ref_info)
-
if version = cmp_ref_info[:version]
-
ret = "#{ret}(#{version})"
-
end
-
ret
-
end
-
end
-
end
-
end; end
-
2
module DTK; class ServiceModule
-
1
class ParsingError
-
1
class RemovedServiceInstanceCmpRef < self
-
1
attr_reader :cmp_ref_info_list
-
1
def initialize(cmp_ref_info_list,opts={})
-
super(err_msg(cmp_ref_info_list),opts)
-
# each element can be a component ref object or a hash
-
@cmp_ref_info_list = cmp_ref_info_list
-
end
-
-
1
private
-
1
def err_msg(cmp_ref_info_list)
-
what = (cmp_ref_info_list.size==1 ? "component" : "components")
-
refs = cmp_ref_info_list.map{|cmp_ref_info|print_form(cmp_ref_info)}.compact.join(",")
-
is = (cmp_ref_info_list.size==1 ? "is" : "are")
-
does = (cmp_ref_info_list.size==1 ? "does" : "do")
-
"You are not allowed to delete #{what} (#{refs}) that #{is} referenced in component module used in this service instance"
-
end
-
-
1
def print_form(cmp_ref_info)
-
ret = ComponentRef.print_form(cmp_ref_info)
-
if version = cmp_ref_info[:version]
-
ret = "#{ret}(#{version})"
-
end
-
ret
-
end
-
end
-
end
-
end; end
-
1
module DTK
-
1
class ServiceModule
-
1
module SettingsMixin
-
1
private
-
1
SettingFilenamePathInfo = {
-
:regexp => Regexp.new("^assemblies/([^/]+)/(.*)\.dtk\.settings\.(json|yaml)$"),
-
:path_depth => 4
-
}
-
-
1
def create_setting_objects_from_dsl(project_idh,module_branch)
-
ret = nil
-
settings_to_add = Hash.new
-
aggregate_errors = ParsingError::Aggregate.new(:error_cleanup => proc{error_cleanup()})
-
setting_meta_file_paths(module_branch) do |meta_file,assembly_name|
-
aggregate_errors.aggregate_errors!() do
-
file_content = RepoManager.get_file_content(meta_file,module_branch)
-
format_type = meta_file_format_type(meta_file)
-
hash_content = Aux.convert_to_hash(file_content,format_type,:file_path => meta_file)||{}
-
return hash_content if ParsingError.is_error?(hash_content)
-
if parsing_error = check_for_parsing_errors(hash_content,:file_path => meta_file)
-
return parsing_error
-
end
-
(settings_to_add[assembly_name] ||= Array.new) << hash_content
-
end
-
end
-
if errors = aggregate_errors.raise_error?(:do_not_raise => true)
-
return errors
-
end
-
return ret if settings_to_add.empty?
-
-
ndx_assembly_name_to_id = Assembly::Template.get_ndx_assembly_names_to_ids(project_idh,self,settings_to_add.keys)
-
settings_to_add.each_pair do |assembly_name,hash_content_array|
-
if assembly_id = ndx_assembly_name_to_id[assembly_name]
-
assembly_idh = project_idh.createIDH(:model_name => :component, :id => assembly_id)
-
create_settings_for_assembly(assembly_idh,hash_content_array)
-
else
-
Log.error("Unexpected that cannot find assembly for (#{assembly_name})")
-
end
-
end
-
ret
-
end
-
-
1
def create_settings_for_assembly(assembly_idh,hash_content_array)
-
db_update_hash = hash_content_array.inject(DBUpdateHash.new()) do |h,hash_content|
-
h.merge(ret_settings_hash(assembly_idh,hash_content))
-
end
-
db_update_hash.mark_as_complete()
-
Model.input_hash_content_into_model(assembly_idh,:service_setting => db_update_hash)
-
end
-
-
1
def check_for_parsing_errors(hash_content,opts)
-
ret = nil
-
unless hash_content['name']
-
return ParsingError.new("Missing 'name' field",AssemblyImport.opts_file_path(opts))
-
end
-
illegal_keys = hash_content.keys - (['name'] + LegalTopSettingKeys)
-
unless illegal_keys.empty?
-
key_or_keys = (illegal_keys.size == 1 ? 'key' : 'keys')
-
legal_keys = "legal keys are: (#{LegalTopSettingKeys.join(',')})"
-
return ParsingError.new("Illegal top level #{key_or_keys} (#{illegal_keys.join(',')}); #{legal_keys}",AssemblyImport.opts_file_path(opts))
-
end
-
ret
-
end
-
1
LegalTopSettingKeys = ['node_bindings','attribute_settings']
-
-
1
def ret_settings_hash(assembly_idh,hash_content)
-
unless ref = hash_content['name']
-
end
-
{
-
ref => {
-
:display_name => hash_content['name'],
-
:node_bindings => hash_content['node_bindings'],
-
:attribute_settings => hash_content['attribute_settings'],
-
:component_component_id => assembly_idh.get_id()
-
}
-
}
-
end
-
-
1
def setting_meta_file_paths(module_branch,&block)
-
setting_dsl_path_info = SettingFilenamePathInfo
-
depth = setting_dsl_path_info[:path_depth]
-
ret = RepoManager.ls_r(depth,{:file_only => true},module_branch)
-
regexp = setting_dsl_path_info[:regexp]
-
ret.reject!{|f|not (f =~ regexp)}
-
ret_with_removed_variants(ret).each do |meta_file|
-
unless assembly_name = (if meta_file =~ regexp then $1; end)
-
raise Error.new("Cannot find assembly name")
-
end
-
block.call(meta_file,assembly_name)
-
end
-
end
-
end
-
end
-
end
-
1
r8_nested_require('service_add_on','service_node_binding')
-
1
module DTK
-
1
class ServiceAddOn < Model
-
1
r8_nested_require('service_add_on','import')
-
###standard get methods
-
1
def get_service_node_bindings()
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:assembly_node_id,:sub_assembly_node_id],
-
:filter => [:eq,:add_on_id,id()]
-
}
-
Model.get_objs(model_handle(:service_node_binding),sp_hash)
-
end
-
-
1
def get_port_links()
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:input_id,:output_id,:output_is_local,:required],
-
:filter => [:eq,:service_add_on_id,id()]
-
}
-
Model.get_objs(model_handle(:port_link),sp_hash)
-
end
-
-
###end standard get methods
-
-
1
def self.import(container_idh,module_name,meta_file,hash_content,ports,aug_assembly_nodes)
-
Import.new(container_idh,module_name,meta_file,hash_content,ports,aug_assembly_nodes).import()
-
end
-
1
def self.dsl_filename_path_info()
-
Import.dsl_filename_path_info()
-
end
-
-
1
def new_sub_assembly_name(base_assembly,sub_assembly_template)
-
# TODO: race condition in time name generated and commited to db
-
existing_sub_assemblies = base_assembly.get_sub_assemblies()
-
name_prefix = "#{base_assembly[:display_name]}::#{sub_assembly_template[:display_name]}"
-
matching_instance_nums = Array.new
-
existing_sub_assemblies.each do |a|
-
if a[:display_name] =~ Regexp.new("^#{name_prefix}(.*$)")
-
suffix = $1
-
suffix_num = (suffix.empty? ? 1 : (suffix =~ /^-([0-9]+$)/; $1))
-
matching_instance_nums << suffix_num.to_i
-
end
-
end
-
if matching_instance_nums.empty?
-
name_prefix
-
else
-
"#{name_prefix}-#{(matching_instance_nums.max+1).to_s}"
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ServiceAddOn
-
1
private
-
1
class Import
-
1
include ServiceDSLCommonMixin
-
1
def initialize(container_idh,module_name,dsl_file,hash_content,ports,aug_assembly_nodes)
-
@container_idh = container_idh
-
@module_name = module_name
-
@dsl_file = dsl_file
-
@hash_content = hash_content
-
@ports = ports
-
augmnent_with_parsed_nanems_and_assembly_ids!(@ports,aug_assembly_nodes)
-
@aug_assembly_nodes = aug_assembly_nodes
-
@assemblies = find_assemblies(aug_assembly_nodes)
-
end
-
1
def import()
-
type = (dsl_file =~ DslRegExp;$1)
-
assembly,assembly_ref = ret_assembly_info(:assembly)
-
sub_assembly,sa_ref = ret_assembly_info(:add_on_sub_assembly)
-
ao_input_hash = {
-
:display_name => type,
-
:description => hash_content["description"],
-
:type => type,
-
:sub_assembly_id => sub_assembly[:id]
-
}
-
port_links = import_add_on_port_links(ports,hash_content["port_links"],assembly,sub_assembly)
-
unless port_links.empty?
-
ao_input_hash.merge!(:port_link => port_links)
-
end
-
-
node_bindings = ServiceNodeBinding.import_add_on_node_bindings(@aug_assembly_nodes,hash_content["node_bindings"])
-
unless node_bindings.empty?
-
ao_input_hash.merge!(:service_node_binding => node_bindings)
-
end
-
-
input_hash = {assembly_ref => {:service_add_on => {type => ao_input_hash}}}
-
Model.import_objects_from_hash(container_idh,"component" => input_hash)
-
end
-
-
1
def self.dsl_filename_path_info()
-
{
-
:regexp => DslRegExp,
-
:path_depth => 4
-
}
-
end
-
-
1
private
-
1
def import_add_on_port_links(ports,add_on_port_links,assembly,sub_assembly)
-
ret = Hash.new
-
return ret if (add_on_port_links||[]).empty?
-
assembly_list = [assembly,sub_assembly]
-
add_on_port_links.each do |ao_pl_ref,ao_pl|
-
link = ao_pl["link"]
-
input_assembly,input_port = add_on_parse(link.values.first,assembly_list)
-
output_assembly,output_port = add_on_parse(link.keys.first,assembly_list)
-
input_id = input_port.matching_id(ports)
-
output_id = output_port.matching_id(ports)
-
output_is_local = (output_assembly == assembly[:display_name])
-
pl_hash = {"input_id" => input_id,"output_id" => output_id, "output_is_local" => output_is_local, "required" => ao_pl["required"]}
-
ret.merge!(ao_pl_ref => pl_hash)
-
end
-
ret
-
end
-
-
1
def add_on_parse(add_on_port_ref,assembly_list)
-
ServiceModule::AssemblyImport::PortRef::AddOn.parse(add_on_port_ref,assembly_list)
-
end
-
-
1
def augment_with_assembly_ids!(ports)
-
nil
-
end
-
-
1
DslRegExp = Regexp.new("add-ons/([^/]+)\.json$")
-
1
attr_reader :container_idh, :module_name, :dsl_file, :hash_content, :ports
-
-
1
def import_port_link(port_link_info)
-
end
-
-
1
def find_assemblies(aug_assembly_nodes)
-
ndx_ret = Hash.new
-
aug_assembly_nodes.each do |n|
-
assembly = n[:assembly]
-
ndx_ret[assembly[:id]] ||= assembly
-
end
-
ndx_ret.values
-
end
-
-
1
def augmnent_with_parsed_nanems_and_assembly_ids!(ports,aug_assembly_nodes)
-
ServiceModule::AssemblyImport.augment_with_parsed_port_names!(ports)
-
ndx_node_assembly = aug_assembly_nodes.inject(Hash.new){|h,n|h.merge(n[:id] => n[:assembly][:id])}
-
ports.each do |p|
-
p[:assembly_id] ||= ndx_node_assembly[p[:node_node_id]]
-
end
-
end
-
-
# returns [assembly,assembly_ref]
-
1
def ret_assembly_info(field)
-
unless name = hash_content[field.to_s]
-
raise ErrorUsage("Field (#{field}) not given in the service add-on file #{dsl_file}")
-
end
-
unless assembly = @assemblies.find{|a|a[:display_name] == name}
-
Log.error("Field (#{field}) has value (#{name}) which is not a valid assembly reference")
-
end
-
raise Error.new("if use need to pass in service_module and call service_module.assembly_ref(name)")
-
# [assembly,ServiceModule.assembly_ref(module_name,name)]
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ServiceNodeBinding < Model
-
1
r8_nested_require('service_node_binding','import')
-
1
def self.import_add_on_node_bindings(aug_assembly_nodes,node_bindings)
-
Import.new(aug_assembly_nodes).import(node_bindings)
-
end
-
end
-
end
-
1
module DTK
-
1
class ServiceNodeBinding
-
1
private
-
1
class Import
-
1
include FactoryObjectMixin
-
1
def initialize(aug_assembly_nodes)
-
@aug_assembly_nodes = aug_assembly_nodes
-
end
-
1
def import(node_bindings)
-
ret = Hash.new
-
return ret if (node_bindings||[]).empty?
-
-
unless node_bindings.kind_of?(Hash)
-
raise ErrorIllFormedTerm.new("node bindings",nil,"is not a hash")
-
end
-
updates = node_bindings.each do |k,v|
-
sub_assembly_node_id,sub_assembly_ref = find_assembly_node_id_and_ref(k)
-
assembly_node_id,assembly_ref = find_assembly_node_id_and_ref(v)
-
hash = {
-
:assembly_node_id => assembly_node_id,
-
:sub_assembly_node_id => sub_assembly_node_id
-
}
-
ref = "#{assembly_ref}---#{sub_assembly_ref}"
-
ret.merge!(ref => hash)
-
end
-
ret
-
end
-
1
private
-
# returns [id,ref]
-
1
def find_assembly_node_id_and_ref(assembly_node_ref)
-
assembly_name,node_name = parse_assembly_node_ref(assembly_node_ref)
-
match = @aug_assembly_nodes.find do |r|
-
r[:assembly][:display_name] == assembly_name and r[:display_name] == node_name
-
end
-
if match
-
ref = assembly_template_node_ref(assembly_name,node_name)
-
[match[:id],ref]
-
else
-
raise ErrorParsing.new("Assembly node ref (#{assembly_node_ref}) does not match any existing assembly node ids")
-
end
-
end
-
-
# returns [assembly_name,node_name]
-
1
def parse_assembly_node_ref(assembly_node_ref)
-
# TODO: should also check that assembly_name is the service add on assembly or sub assembly
-
if assembly_node_ref =~ Regexp.new("(^[^/]+)/([^/]+$)")
-
[$1,$2]
-
else
-
raise ErrorIllFormedTerm.new("assembly node ref",assembly_node_ref)
-
end
-
end
-
end
-
1
public
-
1
class ErrorParsing < ErrorUsage
-
end
-
1
class ErrorIllFormedTerm < ErrorParsing
-
1
def initialize(term,val,alt_descript=nil)
-
super(err_msg(term,val,alt_descript))
-
end
-
1
private
-
1
def err_msg(term,val,alt_descript)
-
last_part =
-
if alt_descript then alt_descript
-
elsif val.kind_of?(String) then "(#{val})"
-
else "(#{val.inspect})"
-
end
-
"Ill-formed #{term} #{last_part}"
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class TestModule < BaseModule
-
-
1
def self.model_type()
-
:test_module
-
end
-
-
1
def self.component_type()
-
:puppet #hardwired
-
end
-
-
1
def component_type()
-
:puppet #hardwired
-
end
-
-
1
def self.module_specific_type(config_agent_type)
-
config_agent_type
-
end
-
-
1
class DSLParser < DTK::ModuleDSLParser
-
1
def self.module_type()
-
:test_module
-
end
-
1
def self.module_class
-
ModuleDSL
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class UpdateModuleOutput < Hash
-
1
def initialize(hash={})
-
super()
-
return if hash.empty?
-
pruned_hash = ret_relevant_keys(hash)
-
replace(pruned_hash)
-
end
-
# create_info is aligned with this object on keys; it just has more info
-
1
def self.create_from_update_create_info(create_info)
-
new(create_info)
-
end
-
1
LegalKeysInfo = {
-
:dsl_parse_error => true,
-
:dsl_updated_info => [:commit_sha,:msg],
-
:dsl_created_info => [:path,:content],
-
:external_dependencies => [:inconsistent,:possibly_missing,:ambiguous]
-
}
-
1
LegalTopKeys = LegalKeysInfo.keys
-
-
1
def set_dsl_updated_info!(msg,commit_sha)
-
ret = self[:dsl_updated_info] ||= Hash.new
-
ret.merge!(:msg => msg) unless msg.nil?
-
ret.merge!(:commit_sha => commit_sha) unless commit_sha.nil?
-
ret
-
end
-
-
1
def external_dependencies()
-
ExternalDependencies.new(self[:external_dependencies]||{})
-
end
-
-
1
def dsl_created_info?()
-
info = self[:dsl_created_info]
-
unless info.nil? or info.empty?
-
DSLCreatedInfo.new(info)
-
end
-
end
-
1
class DSLCreatedInfo < Hash
-
1
def initialize(hash)
-
super()
-
replace(hash)
-
end
-
end
-
-
1
private
-
1
def ret_relevant_keys(hash)
-
ret = Hash.new
-
LegalKeysInfo.each_pair do |top_key,nested_info|
-
if hash.has_key?(top_key)
-
nested = hash[top_key]
-
if nested_info.kind_of?(Array) and nested.kind_of?(Hash)
-
legal_nested_keys = nested_info
-
info = Aux::hash_subset(nested,legal_nested_keys)
-
ret[top_key] = info unless info.empty?
-
else
-
ret[top_key] = nested
-
end
-
end
-
end
-
ret
-
end
-
-
end
-
end
-
1
module DTK
-
1
module ModuleUtils
-
1
class ListMethod
-
-
1
DEFAULT_VERSION = 'CURRENT'
-
-
1
def self.aggregate_detail(branch_module_rows,project_idh,model_type,opts)
-
-
1
project = project_idh.create_object()
-
1
module_mh = project_idh.createMH(model_type)
-
1
diff = opts[:diff]
-
1
remote_repo_base = opts[:remote_repo_base]
-
1
if opts[:include_remotes]
-
1
augment_with_remotes_info!(branch_module_rows,module_mh)
-
end
-
# if there is an external_ref source, use that otherwise look for remote dtkn
-
# there can be duplictes for a module when multiple repos; in which case will agree on all fields
-
# except :repo, :module_branch, and :repo_remotes
-
# index by module
-
1
ndx_ret = Hash.new
-
# aggregate
-
1
branch_module_rows.each do |r|
-
35
module_branch = r[:module_branch]
-
35
module_name = r.module_name()
-
35
ndx_repo_remotes = r[:ndx_repo_remotes]
-
35
ndx = r[:id]
-
35
is_equal = nil
-
35
not_published = nil
-
-
35
if diff
-
if default_remote_repo = RepoRemote.ret_default_remote_repo((ndx_repo_remotes||{}).values)
-
remote = default_remote_repo.remote_dtkn_location(project,model_type,module_name)
-
is_equal = r[:repo].ret_local_remote_diff(module_branch,remote)
-
else
-
not_published = true
-
end
-
end
-
-
35
unless mdl = ndx_ret[ndx]
-
35
r.delete(:repo)
-
35
r.delete(:module_branch)
-
35
mdl = ndx_ret[ndx] = r
-
end
-
35
mdl.merge!(:is_equal => is_equal)
-
35
mdl.merge!(:not_published => not_published)
-
-
35
if opts[:include_versions]
-
(mdl[:version_array] ||= Array.new) << module_branch.version_print_form(Opts.new(:default_version_string => DEFAULT_VERSION))
-
end
-
35
if external_ref_source = module_branch.external_ref_source()
-
22
mdl[:external_ref_source] = external_ref_source
-
end
-
35
if ndx_repo_remotes
-
28
ndx_repo_remotes.each do |remote_repo_id,remote_repo|
-
45
(mdl[:ndx_repo_remotes] ||= Hash.new)[remote_repo_id] ||= remote_repo
-
end
-
end
-
end
-
# put in display name form
-
1
ndx_ret.each_value do |mdl|
-
35
if raw_va = mdl.delete(:version_array)
-
unless raw_va.size == 1 and raw_va.first == DEFAULT_VERSION
-
version_array = (raw_va.include?(DEFAULT_VERSION) ? [DEFAULT_VERSION] : []) + raw_va.reject{|v|v == DEFAULT_VERSION}.sort
-
mdl.merge!(:versions => version_array.join(", "))
-
end
-
end
-
35
external_ref_source = mdl.delete(:external_ref_source)
-
35
ndx_repo_remotes = mdl.delete(:ndx_repo_remotes)
-
-
35
if linked_remote = linked_remotes_print_form((ndx_repo_remotes||{}).values, external_ref_source, {:not_published => mdl[:not_published]})
-
35
mdl.merge!(:linked_remotes => linked_remote)
-
end
-
end
-
1
ndx_ret.values
-
end
-
-
# each branch_module_row has a nested :repo column
-
1
def self.augment_with_remotes_info!(branch_module_rows,module_mh)
-
# index by repo_id
-
36
ndx_branch_module_rows = branch_module_rows.inject(Hash.new){|h,r|h.merge(r[:repo][:id] => r) if r[:repo]}
-
1
unless ndx_branch_module_rows.empty?
-
1
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:repo_id,:repo_name,:repo_namespace,:created_at,:is_default],
-
:filter => [:oneof, :repo_id, ndx_branch_module_rows.keys]
-
}
-
-
1
remotes = Model.get_objs(module_mh.createMH(:repo_remote),sp_hash)
-
-
1
remotes.each do |r|
-
45
ndx = r[:repo_id]
-
45
(ndx_branch_module_rows[ndx][:ndx_repo_remotes] ||= Hash.new).merge!(r[:id] => r)
-
end
-
end
-
end
-
-
1
private
-
-
1
def self.linked_remotes_print_form(repo_remotes, external_ref_source, opts={})
-
35
opts_pp = Opts.new(:dtkn_prefix => true)
-
35
array =
-
if repo_remotes.empty?
-
7
Array.new
-
elsif repo_remotes.size == 1
-
19
[repo_remotes.first.print_form(opts_pp)]
-
else
-
9
default = RepoRemote.ret_default_remote_repo(repo_remotes)
-
35
repo_remotes.reject!{|r|r[:id] == default[:id]}
-
26
[default.print_form(opts_pp.merge(:is_default_namespace => true))] + repo_remotes.map{|r|r.print_form(opts_pp)}
-
end
-
-
35
array << external_ref_source if external_ref_source
-
35
array << "*** NOT PUBLISHED ***" if opts[:not_published]
-
-
35
array.join(JoinDelimiter)
-
end
-
1
JoinDelimiter = ', '
-
end
-
end
-
end
-
1
module DTK
-
1
class ModuleVersion < String
-
1
def self.ret(obj)
-
if obj.nil?
-
nil
-
elsif obj.kind_of?(String)
-
if Semantic.legal_format?(obj)
-
Semantic.create_from_string(obj)
-
elsif AssemblyModule.legal_format?(obj)
-
AssemblyModule.create_from_string(obj)
-
end
-
elsif obj.kind_of?(Assembly)
-
AssemblyModule.new(obj.get_field?(:display_name))
-
else
-
raise Error.new("Unexpected object type passed to ModuleVersion.ret (#{obj.class})")
-
end
-
end
-
-
1
def self.string_master_or_empty?(object)
-
ret =
-
if object.nil?
-
true
-
elsif object.kind_of?(String)
-
object.casecmp("master").eql?(0) || object.casecmp("default").eql?(0)
-
end
-
!!ret
-
end
-
-
# Compares version, return true if same
-
1
def self.versions_same?(str1, str2)
-
return true if (string_master_or_empty?(str1) && string_master_or_empty?(str2))
-
# ignore prefix 'v' if present e.g. v4.2.3
-
return (str1||'').gsub(/^v/,'').eql?((str2||'').gsub(/^v/,''))
-
end
-
-
1
class Semantic < self
-
1
def self.create_from_string(str)
-
new(str)
-
end
-
1
def self.legal_format?(str)
-
!!(str =~ /\A\d{1,2}\.\d{1,2}\.\d{1,2}\Z/)
-
end
-
end
-
-
1
class AssemblyModule < self
-
1
attr_reader :assembly_name
-
-
1
def get_assembly(mh)
-
sp_hash = {
-
:cols=> [:id,:group_id,:display_name],
-
:filter => [:and,[:eq,:display_name,@assembly_name],[:neq,:datacenter_datacenter_id,nil]]
-
}
-
rows = Assembly::Instance.get_objs(mh.createMH(:assembly_instance),sp_hash)
-
if rows.size == 1
-
rows.first
-
elsif rows.size == 0
-
raise Error.new("Unexpected that no assemblies associated with (#{inspect})" )
-
else
-
raise Error.new("Unexpected that #{rows.size.to_s} assemblies are associated with (#{inspect})" )
-
end
-
end
-
-
1
def self.legal_format?(str)
-
!!(str =~StringPattern)
-
end
-
1
def self.create_from_string(str)
-
if str =~ StringPattern
-
assembly_name = $1
-
new(assembly_name)
-
end
-
end
-
1
StringPattern = /^assembly--(.+$)/
-
-
1
private
-
1
def initialize(assembly_name)
-
@assembly_name = assembly_name
-
super(version_string(assembly_name))
-
end
-
-
1
def version_string(assembly_name)
-
"assembly--#{assembly_name}"
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class ModuleRef < Model
-
1
r8_nested_require('module_ref','version_info')
-
1
r8_nested_require('module_ref','lock')
-
1
r8_nested_require('module_ref','missing')
-
-
1
def self.common_columns()
-
[:id,:display_name,:group_id,:module_name,:module_type,:version_info,:namespace_info,:external_ref,:branch_id]
-
end
-
-
1
def self.reify(mh,object)
-
mr_mh = mh.createMH(:model_ref)
-
ret = version_info = nil
-
if object.kind_of?(ModuleRef)
-
ret = object
-
version_info = VersionInfo::Assignment.reify?(object)
-
else #object.kind_of?(Hash)
-
ret = ModuleRef.create_stub(mr_mh,object)
-
if v = object[:version_info]
-
version_info = VersionInfo::Assignment.reify?(v)
-
end
-
end
-
version_info ? ret.merge(:version_info => version_info) : ret
-
end
-
-
1
def set_module_version(version)
-
merge!(:version_info => VersionInfo::Assignment.reify?(version))
-
self
-
end
-
-
1
def self.find_ndx_matching_component_modules(cmp_module_refs)
-
ret = Hash.new
-
return ret if cmp_module_refs.empty?
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:namespace_id,:namespace],
-
:filter => [:or] + cmp_module_refs.map{|r|[:eq,:display_name,r[:module_name]]}
-
}
-
cmp_modules = get_objs(cmp_module_refs.first.model_handle(:component_module),sp_hash)
-
cmp_module_refs.each do |cmr|
-
module_name = cmr[:module_name]
-
namespace = cmr.namespace
-
if cmp_module = cmp_modules.find{|mod|mod[:display_name] == module_name and (mod[:namespace]||{})[:display_name] == namespace}
-
ret[cmr[:id]] = cmp_module
-
end
-
end
-
ret
-
end
-
-
# this finds for each mocule branch the array of component model ref objects associated with the branch
-
1
def self.get_ndx_component_module_ref_arrays(branches)
-
ret = Hash.new
-
return ret if branches.empty?
-
sp_hash = {
-
:cols => common_columns()+[:branch_id],
-
:filter => [:oneof,:branch_id,branches.map{|r|r.id()}]
-
}
-
mh = branches.first.model_handle(:module_ref)
-
get_objs(mh,sp_hash).each do |r|
-
(ret[r[:branch_id]] ||= Array.new) << r
-
end
-
ret
-
end
-
1
def self.get_component_module_ref_array(branch)
-
sp_hash = {
-
:cols => common_columns(),
-
:filter => [:eq,:branch_id,branch.id()]
-
}
-
mh = branch.model_handle(:module_ref)
-
get_objs(mh,sp_hash)
-
end
-
-
1
def self.create_or_update(parent,module_ref_hash_array)
-
update(:create_or_update,parent,module_ref_hash_array)
-
end
-
-
1
def self.update(operation,parent,module_ref_hash_array)
-
return if module_ref_hash_array.empty? and operation == :add
-
rows = ret_create_rows(parent,module_ref_hash_array)
-
model_handle = parent.model_handle.create_childMH(:module_ref)
-
case operation
-
when :create_or_update
-
matching_cols = [:module_name]
-
modify_children_from_rows(model_handle,parent.id_handle(),rows,matching_cols,:update_matching => true,:convert => true)
-
when :add
-
create_from_rows(model_handle,rows)
-
else
-
raise Error.new("Unexpected operation (#{operation})")
-
end
-
end
-
-
1
def version_string()
-
self[:version_info] && self[:version_info].version_string()
-
end
-
-
1
def namespace()
-
unless self[:namespace_info].nil?
-
if self[:namespace_info].kind_of?(String)
-
self[:namespace_info]
-
else
-
raise Error.new("Unexpected type in namespace_info: #{self[:namespace_info].class}")
-
end
-
end
-
end
-
-
1
def dsl_hash_form()
-
ret = Aux.hash_subset(self,DSLHashCols,:only_non_nil=>true)
-
if version_string = version_string()
-
ret.merge!(:version_info => version_string)
-
end
-
if ret[:version_info] and ret[:namespace_info].nil?
-
return ret[:version_info] # simple form
-
end
-
ret
-
end
-
1
DSLHashCols = [:version_info,{:namespace_info => :namespace},:external_ref]
-
-
1
private
-
1
def self.ret_create_rows(parent,module_ref_hash_array)
-
ret = Array.new
-
return ret if module_ref_hash_array.empty?
-
parent_id_assigns = {
-
parent.parent_id_field_name(:module_ref) => parent.id()
-
}
-
module_ref_hash_array.map do |module_ref_hash|
-
assigns =
-
if version_info = module_ref_hash[:version_info]
-
parent_id_assigns.merge(:version_info => version_info.to_s)
-
else
-
assigns = parent_id_assigns
-
end
-
el = Aux.hash_subset(module_ref_hash,[:ref,:display_name,:module_name,:module_type,:namespace_info,:external_ref]).merge(assigns)
-
el[:display_name] ||= display_name(el)
-
el[:ref] ||= ref(el)
-
el
-
end
-
end
-
-
1
def self.display_name(module_ref_hash)
-
[:module_name].each do |key|
-
if module_ref_hash[key].nil?
-
raise Error.new("Unexpected that module_ref_hash[#{key}] is nil")
-
end
-
end
-
module_ref_hash[:module_name]
-
end
-
-
1
def self.ref(module_ref_hash)
-
[:module_type,:module_name].each do |key|
-
if module_ref_hash[key].nil?
-
raise Error.new("Unexpected that module_ref_hash[#{key}] is nil")
-
end
-
end
-
"#{module_ref_hash[:module_type]}--#{module_ref_hash[:module_name]}"
-
end
-
end
-
end
-
1
module DTK
-
1
class ModuleRef
-
1
class Lock < Model
-
1
r8_nested_require('lock','info')
-
1
r8_nested_require('lock','persist')
-
-
1
def self.common_columns()
-
[:id,:display_name,:group_id,:module_name,:info,:locked_branch_sha]
-
end
-
-
1
attr_accessor :info
-
1
def initialize(*args,&block)
-
super
-
@info = nil
-
end
-
1
def locked_branch_sha()
-
self[:locked_branch_sha]
-
end
-
1
def locked_branch_sha=(sha)
-
self[:locked_branch_sha] = sha
-
end
-
-
1
def module_name()
-
(@info && @info.module_name) || (Log.error_pp(["Unexpected that no module name",self]); nil)
-
end
-
-
1
def self.create_from_element(assembly_instance,info)
-
ret = create_stub(assembly_instance.model_handle(:module_ref_lock))
-
ret.info = info
-
ret
-
end
-
-
1
def self.persist(module_refs_lock)
-
Persist.persist(module_refs_lock)
-
end
-
-
1
def self.get(assembly_instance)
-
Persist.get(assembly_instance).map{|r|r.reify()}
-
end
-
-
1
def reify()
-
info_hash = self[:info]
-
@info = info_hash && Info.create_from_hash(model_handle,info_hash)
-
self
-
end
-
-
end
-
end
-
end
-
2
module DTK; class ModuleRef
-
1
class Lock
-
1
class Info
-
1
attr_reader :namespace,:module_name,:level,:children_module_names,:external_ref
-
1
attr_accessor :implementation,:module_branch
-
1
def initialize(namespace,module_name,level,extra_fields={})
-
@namespace = namespace
-
@module_name = module_name
-
@level = level
-
@children_module_names = extra_fields[:children_module_names] || []
-
@implementation = extra_fields[:implementation]
-
@module_branch = extra_fields[:module_branch]
-
@external_ref = extra_fields[:external_ref]
-
end
-
-
1
def self.create_from_hash(mh,info_hash)
-
impl = info_hash[:implementation]
-
mb = info_hash[:module_branch]
-
extra_fields = {
-
:children_module_names => info_hash[:children_module_names],
-
:implementation => object_form(mh.createMH(:implementation),info_hash[:implementation]),
-
:module_branch => object_form(mh.createMH(:module_branch),info_hash[:module_branch])
-
}
-
if external_ref = info_hash[:external_ref]
-
extra_fields.merge!(:external_ref => external_ref)
-
end
-
new(info_hash[:namespace],info_hash[:module_name],info_hash[:level],extra_fields)
-
end
-
-
1
def hash_form()
-
ret = {
-
:namespace => @namespace,
-
:module_name => @module_name,
-
:level => @level,
-
:children_module_names => @children_module_names
-
}
-
ret.merge!(:implementation => @implementation) if implementation
-
ret.merge!(:module_branch => module_branch) if module_branch
-
ret.merge!(:external_ref => external_ref) if external_ref
-
ret
-
end
-
-
1
def children_and_this_module_names()
-
[@module_name] + @children_module_names
-
end
-
1
private
-
-
1
def self.object_form(mh,hash)
-
ret = nil
-
return ret unless hash
-
unless id = hash[:id]
-
Log.error_pp(["Unexpected that hash does not have :id field",hash])
-
return ret
-
end
-
mh.createIDH(:id => id).create_object().merge(hash)
-
end
-
end
-
end
-
end; end
-
-
2
module DTK; class ModuleRef
-
1
class Lock
-
1
module Persist
-
-
1
def self.persist(module_refs_lock)
-
db_update_hash = db_update_hash_all_elements(module_refs_lock)
-
db_update_hash.mark_as_complete()
-
assembly_instance_idh = module_refs_lock.assembly_instance.id_handle()
-
Model.input_hash_content_into_model(assembly_instance_idh,:module_ref_lock => db_update_hash)
-
module_refs_lock
-
end
-
-
1
def self.get(assembly_instance)
-
sp_hash = {
-
:cols => Lock.common_columns(),
-
:filter => [:eq,:component_component_id,assembly_instance.id]
-
}
-
Model.get_objs(assembly_instance.model_handle(:module_ref_lock),sp_hash)
-
end
-
-
1
private
-
1
def self.db_update_hash_all_elements(module_refs_lock)
-
ret = DBUpdateHash.new()
-
module_refs_lock.each_pair do |module_name,module_ref_lock|
-
if hash = module_ref_lock_hash_form(module_name,module_ref_lock)
-
ret.merge!(hash)
-
end
-
end
-
ret
-
end
-
-
1
def self.module_ref_lock_hash_form(module_name,module_ref_lock)
-
unless info = module_ref_lock.info
-
raise_persistence_error('No Info object found on object',module_ref_lock)
-
end
-
hash_body = {
-
:display_name => module_name,
-
:module_name => module_name,
-
:info => info.hash_form(),
-
:locked_branch_sha => module_ref_lock.locked_branch_sha
-
}
-
{module_name => hash_body}
-
end
-
-
1
def self.raise_persistence_error(msg,module_ref_lock)
-
unless msg =~ /:$/
-
msg = msg + ':'
-
end
-
Log.error_pp([msg,module_ref_lock])
-
end
-
-
end
-
end
-
end; end
-
1
module DTK
-
1
class ModuleRef
-
1
class Missing
-
1
attr_reader :module_name,:namespace
-
1
def initialize(module_name,namespace)
-
@module_name = module_name
-
@namespace = namespace
-
end
-
-
1
def error()
-
Error.new(@module_name,@namespace)
-
end
-
-
1
class Error < ErrorUsage
-
1
def initialize(module_name,namespace)
-
super("Missing module ref '#{namespace}:#{module_name}'")
-
end
-
end
-
end
-
end
-
end
-
-
2
module DTK; class ModuleRef
-
1
class VersionInfo
-
-
1
DEFAULT_VERSION = nil
-
-
1
class Assignment < self
-
1
def initialize(version_string)
-
@version_string = version_string
-
end
-
-
1
attr_reader :version_string
-
-
1
def self.reify?(object)
-
version_string =
-
if object.kind_of?(String)
-
ModuleVersion.string_master_or_empty?(object) ? DEFAULT_VERSION : object
-
elsif object.kind_of?(ModuleRef)
-
object[:version_info]
-
end
-
-
if version_string
-
if ModuleVersion::Semantic.legal_format?(version_string)
-
new(version_string)
-
else
-
raise Error.new("Unexpected form of version string (#{version_string})")
-
end
-
end
-
end
-
-
1
def to_s()
-
@version_string
-
end
-
end
-
-
1
class Constraint < self
-
1
def ret_version()
-
if is_scalar?() then is_scalar?()
-
elsif empty? then nil
-
else
-
raise Error.new("Not treating the version type (#{ret.inspect})")
-
end
-
end
-
-
1
def self.reify?(constraint=nil)
-
if constraint.nil? then new()
-
elsif constraint.kind_of?(Constraint) then constraint
-
elsif constraint.kind_of?(String) then new(constraint)
-
elsif constraint.kind_of?(Hash) and constraint.size == 1 and constraint.keys.first == "namespace"
-
# MOD_RESTRUCT: TODO: need to decide if depracting 'namespace' key
-
Log.info("Ignoring constraint of form (#{constraint.inspect})")
-
new()
-
else
-
raise Error.new("Constraint of form (#{constraint.inspect}) not treated")
-
end
-
end
-
-
1
def include?(version)
-
case @type
-
when :empty
-
nil
-
when :scalar
-
@value == version
-
end
-
end
-
-
1
def is_scalar?()
-
@value if @type == :scalar
-
end
-
-
1
def empty?()
-
@type == :empty
-
end
-
-
1
def to_s()
-
case @type
-
when :scalar
-
@value.to_s
-
end
-
end
-
-
1
private
-
1
def initialize(scalar=nil)
-
@type = (scalar ? :scalar : :empty)
-
@value = scalar
-
end
-
-
end
-
end
-
end; end
-
1
module DTK
-
1
class ModuleRefs
-
1
r8_nested_require('module_refs','mixin')
-
1
r8_nested_require('module_refs','parse')
-
1
r8_nested_require('module_refs','component_dsl_form')
-
1
r8_nested_require('module_refs','matching_templates')
-
1
r8_nested_require('module_refs','tree')
-
1
r8_nested_require('module_refs','lock')
-
1
include MatchingTemplatesMixin
-
-
1
attr_reader :parent, :component_modules
-
1
def initialize(parent,content_hash_form,opts={})
-
@parent = parent
-
@component_modules = opts[:content_hash_form_is_reified] ?
-
content_hash_form :
-
Parse.reify_content(parent.model_handle(:model_ref),content_hash_form)
-
end
-
1
private :initialize
-
-
# This finds module refs that matches branches
-
1
def self.get_multiple_component_module_refs(branches)
-
ndx_branches = branches.inject(Hash.new){|h,r|h.merge(r[:id] => r)}
-
ModuleRef.get_ndx_component_module_ref_arrays(branches).map do |(branch_id,cmr_array)|
-
content_hash_content = cmr_array.inject(Hash.new){|h,r|h.merge(key(r[:module_name]) => r)}
-
new(ndx_branches[branch_id],content_hash_content)
-
end
-
end
-
1
def self.get_component_module_refs(branch)
-
content_hash_content = ModuleRef.get_component_module_ref_array(branch).inject(Hash.new) do |h,r|
-
h.merge(key(r[:module_name]) => r)
-
end
-
new(branch,content_hash_content)
-
end
-
-
# returns true if an update made; this updates the ruby object
-
# each element in the array cmp_modules_with_namespaces
-
# is a component module object with the added field :namespace_name
-
# TODO: DTK-2046
-
# make change here so argument has external_ref info; so might pass in as argument module_ref objects
-
# This might require the persistent module refs to be there
-
1
def update_object_if_needed!(cmp_modules_with_namespaces)
-
ret = false
-
cmp_modules_with_namespaces.each do |cmp_mod|
-
[:display_name,:namespace_name].each do |key|
-
raise Error.new("Unexpected that cmp_modules_with_namespaces element does not have key: #{key}") unless cmp_mod[key]
-
end
-
cmp_mod_name = cmp_mod[:display_name]
-
unless component_module_ref?(cmp_mod_name)
-
add_or_set_component_module_ref(cmp_mod_name,:namespace_info => cmp_mod[:namespace_name])
-
ret = true
-
end
-
end
-
ret
-
end
-
-
# serializes and saves object to repo
-
1
def serialize_and_save_to_repo?(opts={})
-
dsl_hash_form = dsl_hash_form()
-
if !dsl_hash_form.empty? || opts[:ambiguous] || opts[:possibly_missing] || opts[:create_empty_module_refs]
-
meta_filename_path = meta_filename_path()
-
@parent.serialize_and_save_to_repo?(meta_filename_path,dsl_hash_form,nil,opts)
-
end
-
end
-
-
1
def matching_component_module_namespace?(cmp_module_name)
-
if module_ref = component_module_ref?(cmp_module_name)
-
module_ref.namespace()
-
end
-
end
-
-
1
def version_objs_indexed_by_modules()
-
ret = Hash.new
-
component_modules.each_pair do |mod,cmr|
-
if version_info = cmr[:version_info]
-
ret.merge!(mod.to_s => version_info)
-
end
-
end
-
ret
-
end
-
-
1
def update_component_template_ids(component_module)
-
# first get filter so can call get_augmented_component_refs
-
assembly_templates = component_module.get_associated_assembly_templates()
-
return if assembly_templates.empty?
-
filter = [:oneof, :id, assembly_templates.map{|r|r[:id]}]
-
opts = {
-
:filter => filter,
-
:component_module_refs => self,
-
:force_compute_template_id => true
-
}
-
aug_cmp_refs = Assembly::Template.get_augmented_component_refs(component_module.model_handle(:component),opts)
-
return if aug_cmp_refs.empty?
-
cmp_ref_update_rows = aug_cmp_refs.map{|r|r.hash_subset(:id,:component_template_id)}
-
Model.update_from_rows(component_module.model_handle(:component_ref),cmp_ref_update_rows)
-
end
-
-
1
def has_module_version?(cmp_module_name,version_string)
-
if cmp_module_ref = component_module_ref?(cmp_module_name)
-
cmp_module_ref.version_string() == version_string
-
end
-
end
-
-
1
def include_module?(cmp_module_name)
-
component_modules.has_key?(key(cmp_module_name))
-
end
-
-
1
def ret_service_module_info()
-
sp_hash = {
-
:cols => [:service_module_info]
-
}
-
get_obj(sp_hash)
-
end
-
-
1
def set_module_version(cmp_module_name,version)
-
key = key(cmp_module_name)
-
if cmr = @component_modules[key]
-
cmr.set_module_version(version)
-
else
-
hash_content = {
-
:component_module => cmp_module_name,
-
:version_info => version
-
}
-
@component_modules[key] = ModuleRef.reify(@parent.model_handle,hash_content)
-
end
-
ModuleRef.update(:create_or_update,@parent,@component_modules.values)
-
end
-
-
1
def update()
-
module_ref_hash_array = @component_modules.map do |(key,hash)|
-
el = hash
-
unless hash[:module_name]
-
el = el.merge(:module_name => key.to_s)
-
end
-
unless hash[:module_type]
-
el = el.merge(:module_type => 'component')
-
end
-
el
-
end
-
ModuleRef.create_or_update(@parent,module_ref_hash_array)
-
end
-
-
1
def self.clone_component_module_refs(base_branch,new_branch)
-
cmrs = get_component_module_refs(base_branch)
-
ModuleRef.create_or_update(new_branch,cmrs.component_modules.values)
-
end
-
-
1
private
-
1
def self.update(parent, cmp_modules)
-
ModuleRef.create_or_update( parent, cmp_modules.values)
-
end
-
-
1
def component_module_ref?(cmp_module_name)
-
@component_modules[key(cmp_module_name)]
-
end
-
-
1
def add_or_set_component_module_ref(cmp_module_name,mod_ref_hash)
-
@component_modules[key(cmp_module_name)] = ModuleRef.reify(@parent.model_handle(),mod_ref_hash)
-
end
-
-
1
def self.key(el)
-
el.to_sym
-
end
-
1
def key(el)
-
self.class.key(el)
-
end
-
-
1
def self.isa_dsl_filename?(path)
-
path == meta_filename_path()
-
end
-
1
def meta_filename_path()
-
self.class.meta_filename_path()
-
end
-
1
def self.meta_filename_path()
-
ServiceModule::DSLParser.default_rel_path?(:component_module_refs) ||
-
raise(Error.new("Unexpected that cannot compute a meta_filename_path for component_module_refs"))
-
end
-
-
1
def dsl_hash_form()
-
ret = SimpleOrderedHash.new()
-
dsl_hash_form = Hash.new
-
component_modules.each_pair do |cmp_module_name,cmr|
-
hf = cmr.dsl_hash_form()
-
dsl_hash_form[cmp_module_name.to_s] = hf unless hf.empty?
-
end
-
-
if dsl_hash_form.empty?
-
return ret
-
end
-
-
sorted_dsl_hash_form = dsl_hash_form.keys.map{|x|x.to_s}.sort().inject(SimpleOrderedHash.new()) do |h,k|
-
h.merge(k => dsl_hash_form[k])
-
end
-
ret.merge(:component_modules => sorted_dsl_hash_form)
-
end
-
-
1
class ComponentTypeToCheck < Array
-
1
def mapping_required?()
-
find{|r|r[:required]}
-
end
-
end
-
-
1
def project_idh()
-
return @project_idh if @project_idh
-
unless service_id = @parent.get_field?(:service_id)
-
raise Error.new("Cannot find project from parent object")
-
end
-
service_module = @parent.model_handle(:service_module).createIDH(:id => service_id).create_object()
-
unless project_id = service_module.get_field?(:project_project_id)
-
raise Error.new("Cannot find project from parent object")
-
end
-
@parent.model_handle(:project).createIDH(:id => project_id)
-
end
-
end
-
end
-
2
module DTK; class ModuleRefs
-
1
class ComponentDSLForm < Hash
-
# Elements of ComponentDSLForm
-
1
class Elements < Array
-
1
def initialize(*args)
-
args = [args] if args.size == 1 and !args.first.kind_of?(Array)
-
super(*args)
-
end
-
1
def add!(a)
-
a.each{|el|self << el}
-
self
-
end
-
end
-
-
1
def initialize(component_module, namespace, external_ref = nil)
-
super()
-
replace(:component_module => component_module, :remote_namespace => namespace, :external_ref => external_ref)
-
end
-
1
private :initialize
-
-
1
def component_module()
-
self[:component_module]
-
end
-
1
def namespace?()
-
self[:remote_namespace]
-
end
-
1
def namespace()
-
unless ret = self[:remote_namespace]
-
Log.error("namespace should not be called when self[:remote_namespace] is empty")
-
end
-
ret
-
end
-
-
# returns a hash with keys component_module_name and value MatchedInfo
-
# :match_type can be
-
# :dsl - match with element in dsl
-
# :single_match - match with unique component module
-
# :multiple_match - match with more than one component modules
-
1
MatchInfo = Struct.new(:match_type,:match_array) # match_array is an array of ComponentDSLForm elements
-
1
def self.get_ndx_module_info(project_idh,module_class,module_branch,opts={})
-
ret = Hash.new
-
raw_cmp_mod_refs = Parse.get_component_module_refs_dsl_info(module_class,module_branch)
-
return raw_cmp_mod_refs if raw_cmp_mod_refs.kind_of?(ErrorUsage::Parsing)
-
# put in parse_form
-
cmp_mod_refs = raw_cmp_mod_refs.map{|r|new(r[:component_module],r[:remote_namespace], r[:external_ref])}
-
-
# prune out any that dont have namespace
-
cmp_mod_refs.reject!{|cmr|!cmr.namespace?}
-
-
# find component modules (in parse form) that matches a component module found in dsl or
-
# in opts; module_names are the relevant modle names to return info about
-
module_names = (cmp_mod_refs.map{|r|r.component_module} + (opts[:include_module_names]||[])).uniq
-
return ret if module_names.empty?
-
cmp_mods_dsl_form = get_matching_component_modules__dsl_form(project_idh,module_names)
-
-
# for each element in cmp_mod_refs that has a namespace see if it matches an existing component module
-
# if not return an error
-
dangling_cmp_mod_refs = Array.new
-
cmp_mod_refs.each do |cmr|
-
unless cmp_mods_dsl_form.find{|cmp_mod|cmp_mod.match?(cmr)}
-
dangling_cmp_mod_refs << cmr
-
end
-
end
-
unless dangling_cmp_mod_refs.empty?
-
# TODO: is this redundant with 'inconsistent external depenedency?
-
cmrs_print_form = dangling_cmp_mod_refs.map{|cmr|cmr.print_form}.join(',')
-
err_msg = "The following component module references in the module refs file do not exist: #{cmrs_print_form}"
-
return ErrorUsage::Parsing.new(err_msg)
-
end
-
-
cmp_mod_refs.each do |cmr|
-
ret[cmr.component_module] = MatchInfo.new(:dsl,ComponentDSLForm::Elements.new(cmr))
-
end
-
if opts[:include_module_names]
-
opts[:include_module_names].each do |module_name|
-
# only add if not there already
-
unless ret[module_name]
-
match_array = ComponentDSLForm::Elements.new(cmp_mods_dsl_form.select{|cmr|module_name == cmr.component_module()})
-
unless match_array.empty?
-
match_type = (match_array.size == 1 ? :single_match : :multiple_match)
-
ret[module_name] = MatchInfo.new(match_type,match_array)
-
end
-
end
-
end
-
end
-
ret
-
end
-
-
1
def self.create_from_module_branches?(module_branches)
-
ret = nil
-
if module_branches.nil? or module_branches.empty?
-
return ret
-
end
-
mb_idhs = module_branches.map{|mb|mb.id_handle()}
-
ret = ComponentDSLForm::Elements.new
-
ModuleBranch.get_namespace_info(mb_idhs).each do |r|
-
ret << new(r[:component_module][:display_name],r[:namespace][:display_name])
-
end
-
ret
-
end
-
-
1
def print_form()
-
if ns = namespace?()
-
"#{ns}:#{component_module()}"
-
else
-
component_module()
-
end
-
end
-
-
1
def match?(cmr)
-
namespace() == cmr.namespace() and component_module() == cmr.component_module()
-
end
-
-
1
private
-
1
def self.get_matching_component_modules__dsl_form(project_idh,module_names)
-
opts = {
-
:cols => [:namespace_id,:namespace],
-
:filter => [:oneof,:display_name,module_names]
-
}
-
matching_modules = ComponentModule.get_all_with_filter(project_idh,opts)
-
matching_modules.map{|m| new(m[:display_name],m[:namespace][:name])}
-
end
-
end
-
end; end
-
-
1
module DTK
-
1
class ModuleRefs
-
1
class Lock < Hash
-
1
r8_nested_require('lock','missing_information')
-
-
# This object is hash of form
-
# {MODULE_NAME1 => ModuleRef::Lock,
-
# MODULE_NAME2 => ModuleRef::Lock,
-
# ....
-
# }
-
1
attr_reader :assembly_instance
-
1
def initialize(assembly_instance)
-
super()
-
@assembly_instance = assembly_instance
-
end
-
-
1
AllTypes = [:locked_dependencies,:locked_branch_shas]
-
# opts can have keys
-
# :with_module_branches - Boolean
-
# :types subset of AllTypes
-
1
def self.get(assembly_instance,opts={})
-
types = opts[:types] || AllTypes
-
opts_nested = Aux.hash_subset(opts,[:with_module_branches])
-
# First check if persisted if not then compute it
-
if persisted = (R8::Config[:module_refs_lock]||{})[:use_persistence] && get_module_refs_lock?(assembly_instance)
-
if missing_info = MissingInformation.missing_information?(persisted,types,opts_nested)
-
missing_info.fill_in_missing_information()
-
else
-
persisted
-
end
-
else
-
compute_elements(assembly_instance,types,opts_nested)
-
end
-
end
-
-
1
def self.compute(assembly_instance,opts={})
-
types = opts[:types] || AllTypes
-
opts_nested = Aux.hash_subset(opts,[:with_module_branches])
-
compute_elements(assembly_instance,types,opts)
-
end
-
-
-
1
def clear_locked_dependencies()
-
ModuleRef::Lock.clear_locked_dependencies(self)
-
end
-
-
1
def persist()
-
ModuleRef::Lock.persist(self)
-
self
-
end
-
-
1
def matching_namespace?(module_name)
-
(el = element?(module_name)) && el.namespace
-
end
-
1
def matching_locked_branch_sha?(module_name)
-
(module_ref_lock = module_ref_lock(module_name)) && module_ref_lock.locked_branch_sha
-
end
-
-
1
def matching_impls_with_children(module_names)
-
ret = Array.new
-
module_names.each do |module_name|
-
if element = element?(module_name)
-
implementations(children_elements(element)+[element]).each do |impl|
-
ret << impl unless ret.include?(impl)
-
end
-
end
-
end
-
ret
-
end
-
-
1
def elements()
-
values().map{|module_ref_lock|module_ref_lock_element(module_ref_lock)}.compact
-
end
-
-
1
private
-
1
def self.get_module_refs_lock?(assembly_instance)
-
module_ref_locks = ModuleRef::Lock.get(assembly_instance)
-
unless module_ref_locks.empty?
-
module_ref_locks.inject(new(assembly_instance)) do |h,module_ref_lock|
-
h.merge(module_ref_lock.module_name => module_ref_lock)
-
end
-
end
-
end
-
-
1
def self.compute_elements(assembly_instance,types,opts={})
-
module_refs_tree = ModuleRefs::Tree.create(assembly_instance)
-
collapsed = module_refs_tree.collapse(Aux.hash_subset(opts,[:raise_errors]))
-
collapsed.choose_namespaces!()
-
collapsed.add_implementations!(assembly_instance)
-
-
ret = new(assembly_instance)
-
collapsed.each_pair do |module_name,single_el_array|
-
if single_el_array.empty?
-
Log.error("Unexpected that single_el_array is empty")
-
else
-
if single_el_array.size > 1
-
Log.error("Unexpected that single_el_array has size > 1; picking first")
-
end
-
ret[module_name] = ModuleRef::Lock.create_from_element(assembly_instance,single_el_array.first)
-
end
-
end
-
-
if types.include?(:locked_branch_shas) or opts[:with_module_branches]
-
add_matching_module_branches!(ret)
-
end
-
if types.include?(:locked_branch_shas)
-
# requires add_matching_module_branches!(ret)
-
add_locked_branch_shas!(ret)
-
end
-
-
ret
-
end
-
-
1
def self.add_locked_branch_shas!(locked_module_refs)
-
locked_module_refs.each_pair do |module_name,module_ref_lock|
-
found = false
-
if el = module_ref_lock_element(module_ref_lock)
-
if mb = el.module_branch
-
if sha = mb[:current_sha]
-
module_ref_lock.locked_branch_sha = sha
-
found = true
-
end
-
end
-
end
-
-
unless found
-
Log.error_pp(["Unexpected that cannot find module_branch[:current_sha] for",module_name,module_ref_lock])
-
end
-
end
-
locked_module_refs
-
end
-
-
1
def self.add_matching_module_branches!(locked_module_refs)
-
ret = locked_module_refs
-
ndx_els = Hash.new
-
disjuncts = Array.new
-
locked_module_refs.elements.each do |el|
-
if impl = el.implementation
-
unless el.module_branch
-
disjuncts << [:and, [:eq,:repo_id,impl[:repo_id]], [:eq,:branch,impl[:branch]]]
-
ndx = "#{impl[:repo_id]}:#{impl[:branch]}"
-
ndx_els[ndx] = el
-
end
-
end
-
end
-
-
return ret if disjuncts.empty?
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:component_id,:branch,:repo_id,:current_sha,:version,:dsl_parsed],
-
:filter => [:or] + disjuncts
-
}
-
-
mh = locked_module_refs.assembly_instance.model_handle(:module_branch)
-
Model.get_objs(mh,sp_hash).each do |mb|
-
ndx = "#{mb[:repo_id]}:#{mb[:branch]}"
-
if el = ndx_els[ndx]
-
el.module_branch = mb
-
end
-
end
-
ret
-
end
-
1
def module_ref_lock(module_name)
-
self[module_name]
-
end
-
1
def element?(module_name)
-
module_ref_lock_element(module_ref_lock(module_name))
-
end
-
1
def element(module_name)
-
element?(module_name) || (Log.error("Unexpected that no match for module name '#{module_name}'"); nil)
-
end
-
1
def module_ref_lock_element(module_ref_lock)
-
self.class.module_ref_lock_element(module_ref_lock)
-
end
-
1
def self.module_ref_lock_element(module_ref_lock)
-
module_ref_lock && module_ref_lock.info
-
end
-
-
1
def children_elements(parent_element)
-
parent_element.children_module_names.map{|mn|element?(mn)}.compact
-
end
-
-
1
def implementations(elements)
-
elements.map{|el|self.class.implementation(el)}.compact
-
end
-
1
def self.implementation(element)
-
element.implementation ||
-
(Log.error("Unexpected that the module '#{element.namespace}:#{element.module_name}' does not have an corresponding implementation object"); nil)
-
end
-
-
end
-
end
-
end
-
2
module DTK; class ModuleRefs
  class Lock
    # Partitions a ModuleRefs::Lock into entries that are missing required
    # information (per `types`) and entries that are complete. An instance is
    # only created when at least one entry is missing information.
    class MissingInformation < self
      # missing/complete are hashes of module_name => module_ref_lock
      def initialize(assembly_instance,missing,complete,types,opts)
        super(assembly_instance)
        @missing = missing
        @complete = complete
        @types = types
        @opts = opts
      end

      # types will be subset of [:locked_dependencies,:locked_branch_shas]
      # opts can have
      #  :with_module_branches - Boolean
      # Returns a MissingInformation object when one or more entries of
      # module_refs_lock lack the requested info; otherwise nil.
      def self.missing_information?(module_refs_lock,types,opts={})
        # partition into rows that are missing info and ones that are not
        missing = Hash.new
        complete = Hash.new
        module_refs_lock.each_pair do |module_name,module_ref_lock|
          if el_missing_information?(module_ref_lock,types,opts)
            missing[module_name] = module_ref_lock
          else
            complete[module_name] = module_ref_lock
          end
        end
        unless missing.empty?
          new(module_refs_lock.assembly_instance,missing,complete,types,opts)
        end
      end

      # Not implemented yet; always raises.
      def fill_in_missing_information()
        # Removed stray debugging `pp` output that preceded the raise.
        raise Error.new("Need to write fill_in_missing_information")
      end

      private

      # True when module_ref_lock lacks any info category requested in `types`:
      # - :locked_dependencies - needs .info (and .module_branch when
      #   opts[:with_module_branches] is set)
      # - :locked_branch_shas  - needs .locked_branch_sha
      def self.el_missing_information?(module_ref_lock,types,opts={})
        if types.include?(:locked_dependencies)
          unless info = module_ref_lock.info
            return true
          end
          if opts[:with_module_branches]
            unless info.module_branch
              return true
            end
          end
        end
        if types.include?(:locked_branch_shas)
          unless module_ref_lock.locked_branch_sha
            return true
          end
        end
        false
      end
    end
  end
end; end
-
-
1
module DTK
  class ModuleRefs
    module MatchingTemplatesMixin
      # component refs are augmented with :component_template key which points to
      # associated component template or nil
      # This method can be called when assembly is imported or staged
      # TODO: any other time this can be called
      # Returns aug_cmp_refs (mutated in place).
      def set_matching_component_template_info?(aug_cmp_refs,opts={})
        ret = aug_cmp_refs
        if aug_cmp_refs.empty?
          return ret
        end
        # determine which elements of aug_cmp_refs need to be matched
        cmp_types_to_check = determine_component_refs_needing_matches(aug_cmp_refs,opts)
        if cmp_types_to_check.empty?
          return ret
        end
        set_matching_component_template_info!(aug_cmp_refs,cmp_types_to_check,opts)
        ret
      end

      private

      # Returns a hash of component_type => ComponentTypeToCheck describing which
      # component refs still need a template match. Side effect: marks each
      # processed ref with :template_id_synched = true.
      def determine_component_refs_needing_matches(aug_cmp_refs,opts={})
        # for each element in aug_cmp_ref, want to set cmp_template_id using following rules
        # 1) if key 'has_override_version' is set
        #    a) if it points to a component template, use this
        #    b) otherwise look it up using given version
        # 2) else look it up and if lookup exists use this as the value to use; element marked required if it does not point to a component template
        # lookup based on matching both version and namespace, if namespace is given
        cmp_types_to_check = Hash.new
        aug_cmp_refs.each do |r|
          unless cmp_type = r[:component_type]||(r[:component_template]||{})[:component_type]
            ref = ComponentRef.print_form(r)
            ref = (ref ? "(#{ref})" : "")
            raise Error.new("Component ref #{ref} must either point to a component template or have component_type set")
          end
          cmp_template_id = r[:component_template_id]
          if r[:has_override_version]
            unless cmp_template_id
              unless r[:version]
                raise Error.new("Component ref has override-version flag set, but no version")
              end
              (cmp_types_to_check[cmp_type] ||= ComponentTypeToCheck.new) << {:pntr => r, :version => r[:version]}
            end
          else
            add_item = true
            # skip refs already synched unless caller forces recomputation
            if r[:template_id_synched] and not opts[:force_compute_template_id]
              if cmp_template_id.nil?
                Log.error("Unexpected that cmp_template_id is null for (#{r.inspect})")
              else
                add_item = false
              end
            end
            if add_item
              (cmp_types_to_check[cmp_type] ||= ComponentTypeToCheck.new) << {:pntr => r,:required => cmp_template_id.nil?}
            end
          end
          r[:template_id_synched] = true #marking each item synchronized
        end

        # shortcut if no locked versions and no required elements
        if component_modules().empty? and not cmp_types_to_check.values.find{|r|r.mapping_required?()}
          # TODO: should we instead prune out all those that dont have mapping required
          return Hash.new
        end
        cmp_types_to_check
      end

      # Looks up template mappings for the checked component types and writes
      # :component_template_id (and optionally :component_template / :namespace)
      # onto the matched refs; raises on dangling required refs.
      def set_matching_component_template_info!(aug_cmp_refs,cmp_types_to_check,opts={})
        ret = aug_cmp_refs
        # Look up modules mapping
        # mappings will have key for each component type referenced and for each key will return hash with keys :component_template and :version;
        # component_template will be null if no match is found
        mappings = get_component_type_to_template_mappings?(cmp_types_to_check.keys)

        # set the component template ids; raise error if there is a required element that does not have a matching component template

        if opts[:set_namespace]
          ret.each do |cmp_ref|
            cmp_type = cmp_ref[:component_type]
            next unless cmp_types_to_check[cmp_type]
            if cmp_type_info = mappings[cmp_type]
              if namespace = cmp_type_info[:namespace]
                cmp_ref[:namespace] = namespace
              end
            end
          end
        end

        reference_errors = Array.new
        cmp_types_to_check.each do |cmp_type,els|
          els.each do |el|
            cmp_type_info = mappings[cmp_type]
            if cmp_template = cmp_type_info[:component_template]
              el[:pntr][:component_template_id] = cmp_template[:id]
              unless opts[:donot_set_component_templates]
                el[:pntr][:component_template] = cmp_template
              end
            elsif el[:required]
              # TODO: This should not be reached because if error then an error will be raised by get_component_type_to_template_mappings? call
              Log.error("TODO: may put back in logic to accrue errors; until then this should not be reached")
              # cmp_ref = {
              #   :component_type => cmp_type,
              #   :version => cmp_type_info[:version]
              # }
              # reference_errors << cmp_ref
            end
          end
        end
        unless reference_errors.empty?
          raise ServiceModule::ParsingError::DanglingComponentRefs.new(reference_errors)
        end
        update_module_refs_dsl?(mappings)
        ret
      end

      # Builds component_type => MatchElement (with version/namespace constraints),
      # then merges in matching component templates found in the project.
      def get_component_type_to_template_mappings?(cmp_types,opts={})
        ret = Hash.new
        return ret if cmp_types.empty?
        # first put in ret info about component type and version
        ret = cmp_types.inject(Hash.new) do |h,cmp_type|
          version = version_string?(cmp_type)
          el = Component::Template::MatchElement.new(
            :component_type => cmp_type,
            :version_field => ModuleBranch.version_field(version)
          )
          if version
            el[:version] = version
          end
          if namespace = namespace?(cmp_type)
            el[:namespace] = namespace
          end
          h.merge(cmp_type => el)
        end

        # get matching component template info and insert matches into ret
        Component::Template.get_matching_elements(project_idh(),ret.values,opts).each do |cmp_template|
          ret[cmp_template[:component_type]].merge!(:component_template => cmp_template)
        end
        ret
      end

      # Adds component module refs for any module whose namespace was resolved
      # by the template mappings but has no module ref yet; raises if an
      # existing ref disagrees on namespace.
      def update_module_refs_dsl?(cmp_type_to_template_mappings)
        module_name_to_ns = Hash.new
        cmp_type_to_template_mappings.each do |cmp_type,cmp_info|
          module_name = module_name(cmp_type)
          unless module_name_to_ns[module_name]
            if namespace = (cmp_info[:component_template]||{})[:namespace]
              module_name_to_ns[module_name] = namespace
            end
          end
        end
        cmp_module_refs_to_add = Array.new
        module_name_to_ns.each do |cmp_module_name,namespace|
          if component_module_ref = component_module_ref?(cmp_module_name)
            unless component_module_ref.namespace() == namespace
              raise Error.new("Unexpected that at this point component_module_ref.namespace() (#{component_module_ref.namespace()}) not equal to namespace (#{namespace})")
            end
          else
            # fixed misspelled local (was new_cmp_moule_ref)
            new_cmp_module_ref = {
              :module_name=>cmp_module_name,
              :module_type=>"component",
              :namespace_info=>namespace
            }
            cmp_module_refs_to_add << new_cmp_module_ref
          end
        end
        unless cmp_module_refs_to_add.empty?
          ModuleRef.update(:add,@parent,cmp_module_refs_to_add)
        end
      end

      # Version string pinned for component_type's module, or nil.
      def version_string?(component_type)
        if cmp_module_ref = component_types_module_ref?(component_type)
          cmp_module_ref.version_string()
        end
      end

      # Namespace pinned for component_type's module, or nil.
      def namespace?(component_type)
        if cmp_module_ref = component_types_module_ref?(component_type)
          cmp_module_ref.namespace()
        end
      end

      def module_name(component_type)
        Component.module_name(component_type)
      end

      def component_types_module_ref?(component_type)
        component_module_ref?(module_name(component_type))
      end
    end
  end
end
-
-
1
module DTK
  class ModuleRefs
    module Mixin
      # Pins component_module to component_version for this service instance
      # (optionally scoped to service_version). Raises ErrorUsage when the
      # component module has no branch for that version; no-ops when the
      # version is already set. Returns clone/update info for the caller.
      def set_component_module_version(component_module,component_version,service_version=nil)
        cmp_module_name = component_module.module_name()
        # make sure that component_module has version defined
        unless component_mb = component_module.get_module_branch_matching_version(component_version)
          defined_versions = component_module.get_module_branches().map{|r|r.version_print_form()}.compact
          version_info =
            if defined_versions.empty?
              "there are no versions loaded"
            else
              "available versions: #{defined_versions.join(', ')}"
            end
          raise ErrorUsage.new("Component module (#{cmp_module_name}) does not have version (#{component_version}) defined; #{version_info}")
        end

        cmp_module_refs = get_component_module_refs(service_version)

        # check if set to this version already; if so no-op
        if cmp_module_refs.has_module_version?(cmp_module_name,component_version)
          return ret_clone_update_info(service_version)
        end

        # set in cmp_module_refs the module to have the specified value and update both model and service's global refs
        cmp_module_refs.set_module_version(cmp_module_name,component_version)

        # update the component refs with the new component_template_ids
        cmp_module_refs.update_component_template_ids(component_module)

        ret_clone_update_info(service_version)
      end

      # Returns the ModuleRefs object for the branch matching service_version
      # (nil selects the base branch — TODO confirm against
      # get_module_branch_matching_version semantics).
      def get_component_module_refs(service_version=nil)
        branch = get_module_branch_matching_version(service_version)
        ModuleRefs.get_component_module_refs(branch)
      end
    end
  end
end
-
1
module DTK
  class ModuleRefs
    # Parsing of component module refs from module DSL files into ModuleRefs
    # objects, plus updating the model from the parse result.
    class Parse < self
      # Parses the branch's directory for component module ref declarations.
      def self.get_component_module_refs_dsl_info(module_class,module_branch,opts={})
        module_class::DSLParser.parse_directory(module_branch,:component_module_refs,opts)
      end

      # Converts parse objects to internal form, persists them, and returns a
      # ModuleRefs object (or an ErrorUsage::Parsing on parse failure).
      def self.update_component_module_refs_from_parse_objects(module_class,module_branch,cmp_dsl_form_els)
        hash_content = semantic_parse(module_branch,cmp_dsl_form_els)
        return hash_content if hash_content.kind_of?(ErrorUsage::Parsing)
        update(module_branch, hash_content)
        ModuleRefs.new(module_branch,hash_content,:content_hash_form_is_reified => true)
      end

      # Parses the branch's DSL and updates the model in one step; returns an
      # ErrorUsage::Parsing object instead of raising on parse errors.
      def self.update_component_module_refs(module_class,module_branch,opts={})
        dsl_info = get_component_module_refs_dsl_info(module_class,module_branch,opts)
        return dsl_info if dsl_info.kind_of?(ErrorUsage::Parsing)
        update_component_module_refs_from_parse_objects(module_class,module_branch,dsl_info)
      end

      private

      # Reifies dsl_info, converting any raised parsing error (or unexpected
      # error) into an ErrorUsage::Parsing return value rather than raising.
      def self.semantic_parse(branch,dsl_info)
        ret = nil
        begin
          ret = reify_content(branch.model_handle(:model_ref),dsl_info)
        rescue ErrorUsage::Parsing => e
          return e
        rescue => e
          #TODO: Logging to make sure that it is parse error and not code error
          Log.info_pp([e,e.backtrace[0..5]])
          return ErrorUsage::Parsing.new('Module refs parsing error')
        end
        ret
      end

      # Normalizes `object` into a hash of module_name (Symbol) => reified
      # ModuleRef, accepting either model-table content or DSL parser output.
      def self.reify_content(mh,object)
        return {} unless object
        # if Hash type then this comes from querying the model ref table
        if object.kind_of?(Hash)
          object.inject(Hash.new) do |h,(k,v)|
            if v.kind_of?(ModuleRef)
              h.merge(k.to_sym => ModuleRef.reify(mh,v))
            else
              raise Error.new("Unexpected value associated with component module ref: #{v.inspect}")
            end
          end
        #This comes from parsing the dsl file
        elsif object.kind_of?(ServiceModule::DSLParser::Output) or object.kind_of?(ComponentDSLForm::Elements)
          object.inject(Hash.new) do |h,r|
            internal_form = convert_parse_to_internal_form(r)
            h.merge(parse_form_module_name(r).to_sym => ModuleRef.reify(mh,internal_form))
          end
        else
          raise Error.new("Unexpected input (#{object.class})")
        end
      end

      # Extracts the module name from a parse-form hash; raises a parsing
      # error unless it is a String.
      def self.parse_form_module_name(parse_form_hash)
        ret = parse_form_hash[:component_module]
        ErrorUsage::Parsing.raise_error_if_not(ret,String,
                                               :type => 'module name',:for => 'component module ref')
        ret
      end

      # Maps parse-form keys to the internal module-ref hash form.
      def self.convert_parse_to_internal_form(parse_form_hash)
        ret = {
          :module_name => parse_form_hash[:component_module],
          :module_type => 'component'
        }
        # TODO: should have dtk common return namespace_info instead of remote_namespace
        if namespace_info = parse_form_hash[:remote_namespace]
          ret[:namespace_info] = namespace_info
        end
        if version_info = parse_form_hash[:version_info]
          ret[:version_info] = version_info
        end

        if external_ref = parse_form_hash[:external_ref]
          ret[:external_ref] = external_ref
        end

        ret
      end
    end
  end
end
-
1
module DTK
  class ModuleRefs
    # This class is used to build a hierarchical dependency tree and to detect conflicts
    # (missing module refs and modules referenced under multiple namespaces).
    class Tree
      r8_nested_require('tree','collapsed')
      include Collapsed::Mixin

      # Marker type string used in hash_form output for unresolved refs.
      MISSING_MODULE__REF_TYPE = '-- MISSING MODULE REF --'

      attr_reader :module_branch
      # context is the object this node represents: an Assembly (root),
      # a ModuleRef, or a ModuleRef::Missing.
      def initialize(module_branch,context=nil)
        @module_branch = module_branch
        @context = context
        # module_refs is hash where key is module_name and
        # value is either nil for a missing reference
        # or it points to a Tree object
        @module_refs = Hash.new
      end
      private :initialize

      # opts can have
      #  :components - a set of component instances to constrain what is returned
      def self.create(assembly_instance, opts={})
        assembly_branch = AssemblyModule::Service.get_assembly_branch(assembly_instance)
        components = opts[:components] || assembly_instance.get_component_instances()
        create_module_refs_starting_from_assembly(assembly_instance,assembly_branch,components)
      end

      # Returns the context itself when it is a missing module ref; else false.
      def isa_missing_module_ref?()
        @context.kind_of?(ModuleRef::Missing) && @context
      end
      # Returns the context itself when it is a ModuleRef; else false.
      def isa_module_ref?()
        @context.kind_of?(ModuleRef) && @context
      end

      # Returns [missing, multi_ns]: modules with missing refs and modules
      # referenced from two or more distinct namespaces.
      def violations?()
        missing = Hash.new
        multi_ns = Hash.new
        refs = hash_form()

        refs.each do |k,v|
          if k == :refs
            check_refs(v, missing, multi_ns)
          end
        end

        # only keep modules seen under at least two namespaces
        multi_ns.delete_if{|k,v| v.size < 2}
        return missing, multi_ns
      end

      # Recursive walk over hash_form refs, accumulating into missing/multi_ns.
      def check_refs(refs, missing, multi_ns)
        return unless refs

        refs.each do |name,ref|
          if ref
            namespace = ref[:namespace]
            type = ref[:type]
            if val = multi_ns["#{name}"]
              unless val.include?(namespace)
                val << namespace
                multi_ns.merge!(name => val)
              end
            elsif type && type.to_s.eql?(MISSING_MODULE__REF_TYPE)
              missing.merge!(name => namespace)
            else
              multi_ns.merge!(name => [namespace])
            end
            check_refs(ref[:refs], missing, multi_ns) if ref.has_key?(:refs)
          else
            # we don't know which namespace this module belongs to, so sending empty namespace
            missing.merge!(name => '')
          end
        end
      end

      # Serializes this node (and recursively its refs) into a plain hash
      # with :type/:name/:namespace/:refs keys.
      def hash_form()
        ret = Hash.new
        if @context.kind_of?(Assembly)
          ret[:type] = Workspace.is_workspace?(@context) ? 'Workspace' : 'Assembly::Instance'
          ret[:name] = @context.get_field?(:display_name)
        elsif isa_module_ref?()
          ret[:type] = 'ModuleRef'
          ret[:namespace] = namespace()
          if external_ref = external_ref?()
            ret[:external_ref] = external_ref
          end
        elsif isa_missing_module_ref?()
          ret[:type] = MISSING_MODULE__REF_TYPE
          ret[:namespace] = namespace()
        else
          ret[:type] = @context.class
          ret[:content] = @context
        end

        refs = @module_refs.inject(Hash.new) do |h,(module_name,subtree)|
          h.merge(module_name => subtree && subtree.hash_form())
        end
        ret[:refs] = refs unless refs.empty?

        ret
      end

      def add_module_ref!(module_name,child)
        @module_refs[module_name] = child
      end

      # Namespace of the context; logs an error and returns nil if unknown.
      def namespace()
        namespace?() || (Log.error_pp(["Unexpected that no namespace_info for",self]); nil)
      end
      def namespace?()
        if @context.kind_of?(ModuleRef)
          @context[:namespace_info]
        elsif @context.kind_of?(ModuleRef::Missing)
          @context.namespace
        end
      end

      def external_ref?()
        if @context.kind_of?(ModuleRef)
          @context[:external_ref]
        end
      end

      # Recursively populates @module_refs from this node's module branch,
      # raising ErrorUsage on reference cycles (detected via parent_path).
      def recursive_add_module_refs!(parent_path=[])
        get_children([module_branch()]) do |module_name,namespace,child|
          ns_module_name = self.class.namespace_model_name_path_el(namespace,module_name)
          path = parent_path+[ns_module_name]
          if parent_path.include?(ns_module_name)
            recursive_loop = path.join(' -> ')
            raise ErrorUsage.new("Module '#{ns_module_name}' is in a recursive loop: #{recursive_loop}")
          end
          if child
            add_module_ref!(module_name,child)
            child.recursive_add_module_refs!(path)
          else
            # missing ref to module_name,namespace
            module_branch = nil
            context = ModuleRef::Missing.new(module_name,namespace)
            add_module_ref!(module_name,self.class.new(module_branch,context))
          end
        end
        self
      end

      private

      # "namespace:module_name" (or just module_name when namespace is nil).
      def self.namespace_model_name_path_el(namespace,module_name)
        namespace ? "#{namespace}:#{module_name}" : module_name
      end

      # Builds the root Tree for an assembly: groups components by branch,
      # fetches the branches in one query, then recursively expands children.
      def self.create_module_refs_starting_from_assembly(assembly_instance,assembly_branch,components)
        # get relevant service and component module branches
        ndx_cmps = Hash.new #components indexed (grouped) by branch id
        components.each do |cmp|
          unless branch_id = cmp.get_field?(:module_branch_id)
            Log.error("Unexpected that :module_branch_id not in: #{cmp.inspect}")
            next
          end
          (ndx_cmps[branch_id] ||= Array.new) << cmp
        end
        cmp_module_branch_ids = ndx_cmps.keys

        sp_hash = {
          :cols => ModuleBranch.common_columns(),
          :filter => [:oneof,:id,cmp_module_branch_ids]
        }
        cmp_module_branches = Model.get_objs(assembly_instance.model_handle(:module_branch),sp_hash)

        #TODO: extra check we can remove after we refine
        missing_branches = cmp_module_branch_ids - cmp_module_branches.map{|r|r[:id]}
        unless missing_branches.empty?
          Log.error("Unexpected that the following branches dont exist; branches with ids #{missing_branches.join(',')}")
        end

        ret = new(assembly_branch,assembly_instance)
        get_top_level_children(cmp_module_branches,assembly_branch) do |module_name,child|
          if child
            ret.add_module_ref!(module_name,child)
            parent_path = [namespace_model_name_path_el(child.namespace,module_name)]
            child.recursive_add_module_refs!(parent_path)
          else
            Log.error_pp(["Unexpected that in get_top_level_children child can be nil",cmp_module_branches,assembly_branch])
          end
        end
        ret
      end

      # TODO: fix this up because cmp_module_branches already has implicit namespace so this is
      # effectively just checking consistency of component module refs
      # and setting of module_branch_id in component instances
      # Yields (module_name, child Tree-or-nil) for each top-level branch.
      def self.get_top_level_children(cmp_module_branches,service_module_branch,&block)
        # get component module refs indexed by module name
        ndx_module_refs = Hash.new
        ModuleRefs.get_component_module_refs(service_module_branch).component_modules.each_value do |module_ref|
          ndx_module_refs[module_ref[:module_name]] ||= module_ref
        end

        # get branches indexed by module_name
        # TODO: can bulk up; look also at using
        # assembly_instance.get_objs(:cols=> [:instance_component_module_branches])
        ndx_mod_name_branches = cmp_module_branches.inject(Hash.new) do |h,module_branch|
          h.merge(module_branch.get_module()[:display_name] => module_branch)
        end

        ndx_mod_name_branches.each_pair do |module_name,module_branch|
          module_ref = ndx_module_refs[module_name]
          child = module_ref && new(module_branch,module_ref)
          block.call(module_name,child)
        end
      end

      # Yields (module_name, namespace_info, child Tree-or-nil) for each module
      # ref declared across the given branches.
      def get_children(module_branches,opts={},&block)
        # get component module refs indexed by module name
        ndx_module_refs = Hash.new
        ModuleRefs.get_multiple_component_module_refs(module_branches).each do |cmrs|
          cmrs.component_modules.each_value do |module_ref|
            ndx_module_refs[module_ref[:id]] ||= module_ref
          end
        end
        module_refs = ndx_module_refs.values

        #ndx_module_branches is component module branches indexed by module ref id
        ndx_module_branches = Hash.new
        ModuleRef.find_ndx_matching_component_modules(module_refs).each_pair do |mod_ref_id,cmp_module|
          version = nil #TODO: stub; need to change when treat service instance branches
          ndx_module_branches[mod_ref_id] = cmp_module.get_module_branch_matching_version(version)
        end

        module_refs.each do |module_ref|
          module_branch = ndx_module_branches[module_ref[:id]]
          child = module_branch && self.class.new(module_branch,module_ref)
          block.call(module_ref[:module_name],module_ref[:namespace_info],child)
        end
      end

    end
  end
end
-
2
module DTK; class ModuleRefs
  class Tree
    # Flattened view of a Tree: maps module_name => array of
    # ModuleRef::Lock::Info elements (one per distinct ref), with depth levels
    # recorded so namespace conflicts can be resolved.
    class Collapsed < Hash
      module Mixin
        # Depth-first collapse of the tree into a Collapsed hash. Missing
        # module refs are skipped unless opts[:raise_errors] is set, in which
        # case their error is raised. opts[:level] tracks recursion depth
        # (root callers start at 1).
        def collapse(opts={})
          ret = Collapsed.new
          level = opts[:level] || 1
          @module_refs.each_pair do |module_name,subtree|
            if missing_module_ref = subtree.isa_missing_module_ref?()
              if opts[:raise_errors]
                raise missing_module_ref.error()
              else
                next
              end
            end

            children_module_names = Array.new
            opts_subtree = Aux.hash_subset(opts,[:raise_errors]).merge(:level => level+1)
            # merge the collapsed subtree, de-duplicating elements and
            # accumulating the names of all (transitive) children
            subtree.collapse(opts_subtree).each_pair do |subtree_module_name,subtree_els|
              collapsed_tree_els = ret[subtree_module_name] ||= Array.new
              subtree_els.each do |subtree_el|
                unless collapsed_tree_els.find{|el|el == subtree_el}
                  collapsed_tree_els << subtree_el
                end
                subtree_el.children_and_this_module_names().each do |st_module_name|
                  children_module_names << st_module_name unless children_module_names.include?(st_module_name)
                end
              end
            end

            if namespace = subtree.namespace()
              opts_create = {:children_module_names => children_module_names}
              if external_ref = subtree.external_ref?()
                opts_create.merge!(:external_ref => external_ref)
              end
              (ret[module_name] ||= Array.new) << ModuleRef::Lock::Info.new(namespace,module_name,level,opts_create)
            end
          end
          ret
        end
      end

      # opts[:strategy] can be
      #  :pick_first_level - if multiple and have first level one then use that otherwise will randomly pick top one
      def choose_namespaces!(opts={})
        strategy = opts[:strategy] || DefaultStrategy
        if strategy == :pick_first_level
          choose_namespaces__pick_first_level!()
        else
          raise Error.new("Currently not supporting namespace resolution strategy '#{strategy}'")
        end
      end
      DefaultStrategy = :pick_first_level

      # Attaches implementation objects to each element (when one exists for
      # its namespace/module_name pair). Returns self.
      def add_implementations!(assembly_instance)
        ndx_impls = get_relevant_ndx_implementations(assembly_instance)
        each_element do |el|
          ndx = impl_index(el.namespace,el.module_name)
          if impl = ndx_impls[ndx]
            el.implementation = impl
          end
        end
        self
      end

      private

      def impl_index(namespace,module_name)
        "#{namespace}:#{module_name}"
      end

      # returns implementations indexed by impl_index
      def get_relevant_ndx_implementations(assembly_instance)
        base_version_field = Implementation.version_field(BaseVersion)
        assembly_version_field = Implementation.version_field(assembly_version(assembly_instance))
        disjuncts = Array.new
        each_element do |el|
          disjunct =
            [:and,
             [:eq,:module_name,el.module_name],
             [:eq,:module_namespace,el.namespace],
             [:oneof,:version,[base_version_field,assembly_version_field]]
            ]
          disjuncts << disjunct
        end
        filter = ((disjuncts.size == 1) ? disjuncts.first : ([:or] + disjuncts))
        sp_hash = {
          :cols => [:id,:group_id,:display_name,:repo,:repo_id,:branch,:module_name,:module_namespace,:version],
          :filter => filter
        }
        # get the implementations that meet sp_hash, but if have two matches for a module_name/module_namespace pair
        # return just one that matches the assembly version
        ret = Hash.new
        Model.get_objs(assembly_instance.model_handle(:implementation),sp_hash).each do |r|
          ndx = impl_index(r[:module_namespace],r[:module_name])
          # if ndx_ret[ndx], dont replace if what is there is the assembly branch
          unless (ret[ndx]||{})[:version] == assembly_version_field
            ret[ndx] = r
          end
        end
        ret
      end
      BaseVersion = nil

      def assembly_version(assembly_instance)
        ModuleVersion.ret(assembly_instance)
      end

      # Collapses each multi-element entry to a single element: the one with
      # the lowest level (closest to the root), logging when namespaces
      # actually conflicted.
      def choose_namespaces__pick_first_level!(opts={})
        each_pair do |module_name,els|
          if els.size > 1
            first_el = els.sort{|a,b| a.level <=> b.level}.first
            #warning only if first_el does not have level 1 and multiple namespaces
            unless first_el.level == 1
              namespaces = els.map{|el|el.namespace}.uniq
              if namespaces.size > 1
                Log.error("Multiple namespaces (#{namespaces.join(',')}) for '#{module_name}'; picking one '#{first_el.namespace}'")
              end
            end
            self[module_name] = [first_el]
          end
        end
        self
      end

      # Yields every element across all values.
      def each_element(&block)
        values.each{|els|els.each{|el|block.call(el)}}
      end

    end
  end
end; end
-
1
module XYZ
  # Placeholder model class; no behavior beyond what Model provides.
  class MonitoringItem < Model
  end
end
-
-
1
module DTK
  # Model wrapper for module namespaces ("namespace:module_name" naming).
  class Namespace < Model
    # TODO: get rid of this class and fold into parent after finish conversion
    # Methods that use this constant are:
    # - namespace_delimiter
    # - join_namespace
    # - full_module_name_parts?
    # - namespace_from_ref?
    # - module_ref_field

    NAMESPACE_DELIMITER = ':'

    def self.namespace_delimiter()
      NAMESPACE_DELIMITER
    end

    # Standard column set for namespace queries.
    def self.common_columns()
      [
        :id,
        :group_id,
        :display_name,
        :name,
        :remote
      ]
    end

    # TODO: should these both be replaced by something that does not rely on format of ref
    # Returns the namespace part of "namespace:rest", or nil when the ref
    # contains no delimiter.
    def self.namespace_from_ref?(service_module_ref)
      if service_module_ref.include? namespace_delimiter()
        service_module_ref.split(namespace_delimiter()).first
      end
    end

    def self.module_ref_field(module_name,namespace)
      "#{namespace}#{namespace_delimiter()}#{module_name}"
    end

    #
    # Get/Create default namespace
    #
    def self.default_namespace(namespace_mh)
      find_or_create(namespace_mh, default_namespace_name)
    end

    # Prefixes module_name with the default namespace unless it already
    # contains a delimiter.
    def self.enrich_with_default_namespace(module_name)
      module_name.include?(NAMESPACE_DELIMITER) ? module_name : "#{default_namespace_name}#{NAMESPACE_DELIMITER}#{module_name}"
    end

    # if user for some reason set R8::Config[:repo][:local][:default_namespace] to '' we will use running_process_user() as namespace
    def self.default_namespace_name
      CurrentSession.get_default_namespace()||R8::Config[:repo][:local][:default_namespace]||::DTK::Common::Aux.running_process_user()
    end

    def self.join_namespace(namespace, name)
      "#{namespace}#{namespace_delimiter()}#{name}"
    end

    # returns [namespace,name]; namespace can be null if cant determine it
    def self.full_module_name_parts?(name_or_full_module_name)
      if name_or_full_module_name =~ Regexp.new("(^.+)#{namespace_delimiter()}(.+$)")
        namespace,name = [$1,$2]
      else
        namespace,name = [nil,name_or_full_module_name]
      end
      [namespace,name]
    end

    # Looks up a namespace row by (downcased) name; returns nil when absent,
    # raises when multiple rows match.
    def self.find_by_name(namespace_mh, namespace_name)
      sp_hash = {
        :cols => common_columns(),
        :filter => [:eq, :name, namespace_name.to_s.downcase]
      }

      results = Model.get_objs(namespace_mh, sp_hash)
      raise Error, "There should not be multiple namespaces with name '#{namespace_name}'" if results.size > 1
      results.first
    end

    # Finds the named namespace, creating it when it does not exist yet.
    # Accepts either a String or an existing Namespace object.
    def self.find_or_create(namespace_mh, namespace_name)
      namespace_name = namespace_name.is_a?(Namespace) ? namespace_name.display_name : namespace_name
      raise Error, "You need to provide namespace name where creating object" if namespace_name.nil? || namespace_name.empty?
      namespace = self.find_by_name(namespace_mh, namespace_name)

      unless namespace
        namespace = create_new(namespace_mh, namespace_name)
      end

      namespace
    end

    # find_or_create when a name is given; the default namespace otherwise.
    def self.find_or_create_or_default(namespace_mh, namespace_name)
      namespace_obj = nil
      if (namespace_name && !namespace_name.empty?)
        namespace_obj = self.find_or_create(namespace_mh, namespace_name)
      else
        namespace_obj = self.default_namespace(namespace_mh)
      end

      namespace_obj
    end

    #
    # Create namespace object
    #
    def self.create_new(namespace_mh, name, remote=nil)
      idh = create_from_rows(namespace_mh,
                             [{
                                :name => name,
                                :display_name => name,
                                :ref => name,
                                :remote => remote
                              }]
                            ).first

      idh.create_object()
    end

    # TODO: would need to enhance if get a legitimate key, but it has nil or false value
    # NOTE(review): exposes model fields as reader methods via method_missing;
    # no matching respond_to_missing? override, so respond_to? will report
    # false for these dynamic readers — consider adding one.
    def method_missing(m, *args, &block)
      get_field?(m) || super(m, *args, &block)
    end

  end
end
-
1
module XYZ
  # Schema/model definitions for network-related objects. The `up()` methods
  # are schema-definition DSL calls (columns, foreign keys, relations).
  class NetworkPartition < Model
    set_relation_name(:network,:partition)
    class << self
      def up()
        ds_column_defs :ds_key
        column :is_deployed, :boolean, :default => false
        column :is_internet, :boolean, :default => false #TBD might replace with :type
        many_to_one :library, :datacenter
      end
    end
  end

  # Gateway linking two network partitions.
  class NetworkGateway < Model
    set_relation_name(:network,:gateway)
    class << self
      def up()
        ds_column_defs :ds_attributes, :ds_key
        column :is_deployed, :boolean, :default => false
        foreign_key :network_partition1_id, :network_partition, FK_CASCADE_OPT
        foreign_key :network_partition2_id, :network_partition, FK_CASCADE_OPT
        many_to_one :library, :datacenter
      end
      ##### Actions
    end
  end

  # TBD: might move AddressAccessPoint to node or own model file
  class AddressAccessPoint < Model
    set_relation_name(:network,:address_access_point)
    class << self
      def up()
        column :network_address, :json #e.g., {:family : "ipv4, :address : "10.4.5.7"} allow family: "dns" :address"
        column :type, :varchar, :size => 25 #internet,local ..
        foreign_key :network_partition_id, :network_partition, FK_CASCADE_OPT
        many_to_one :node
      end
      ##### Actions
    end
  end
end
-
1
module DTK
-
1
class Node < Model
-
1
r8_nested_require('node','meta')
-
1
extend NodeMetaClassMixin
-
1
set_relation_name(:node,:node)
-
-
1
r8_nested_require('node','type')
-
1
r8_nested_require('node','template')
-
1
r8_nested_require('node','instance')
-
1
r8_nested_require('node','target_ref')
-
1
r8_nested_require('node','filter')
-
1
r8_nested_require('node','clone')
-
1
r8_nested_require('node','node_attribute')
-
1
r8_nested_require('node','external_ref')
-
1
r8_nested_require('node','delete')
-
1
r8_nested_require('node','dangling_link_mixin')
-
-
1
include Type::Mixin
-
1
include Clone::Mixin
-
1
extend NodeAttribute::ClassMixin
-
1
include NodeAttribute::Mixin
-
1
include ExternalRef::Mixin
-
1
include Delete::Mixin
-
1
include DanglingLink::Mixin
-
-
1
# Standard column set fetched for node queries: identity, naming/typing,
# status, target linkage, UI state and external-reference fields.
def self.common_columns()
  [:id, :group_id, :display_name, :name,
   :os_type, :type, :description, :status,
   :target_id, :ui, :external_ref,
   :hostname_external_ref, :managed, :admin_op_status]
end
-
-
1
# Returns the node-group subclass object when this node is a node group;
# otherwise returns self unchanged.
def create_obj_optional_subclass()
  if is_node_group?()
    create_obj_subclass()
  else
    self
  end
end
-
1
# Wraps self in the node-group subclass object; used internally by
# create_obj_optional_subclass.
def create_obj_subclass()
  subclass_name = node_group_model_name()
  create_subclass_obj(subclass_name)
end
private :create_obj_subclass
-
-
1
# True when this node's :type is one of the target-ref types.
def is_target_ref?(opts={})
  node_type = get_field?(:type)
  TargetRef.types(opts).include?(node_type)
end
-
-
1
# True when this node is the special assembly-wide node; refreshes the
# :type field from the model first.
def is_assembly_wide_node?()
  update_object!(:type)
  'assembly_wide'.eql?(self[:type])
end
-
-
1
# Print form for obj when it is a Node with a display_name; nil otherwise.
def self.assembly_node_print_form?(obj)
  return nil unless obj.kind_of?(Node)
  if obj.get_field?(:display_name)
    obj.assembly_node_print_form()
  end
end
-
-
1
# Print form for this node within an assembly: target refs get their own
# formatting, everything else uses the display name.
def assembly_node_print_form()
  return TargetRef.assembly_node_print_form(self) if is_target_ref?()
  get_field?(:display_name)
end
-
-
#This is overwritten by node group subclasses
-
1
#This is overwritten by node group subclasses
# For a plain node returns [self]; when the superclass instance is actually
# a node group, delegates to the subclass implementation.
def get_node_group_members()
  #in case this called on superclass that is actually a node group
  return [self] unless is_node_group?()
  create_obj_subclass().get_node_group_members()
end
-
-
1
# Creates a node object from scalar values; with opts[:subclass] the result
# is promoted to the node-group subclass when applicable.
def self.create_from_model_handle(hash_scalar_values,model_handle,opts={})
  obj = super(hash_scalar_values,model_handle)
  if opts[:subclass]
    obj.create_obj_optional_subclass()
  else
    obj
  end
end
-
-
# TODO: stub for feature_node_admin_state
-
1
# Stub: always false for now (see feature_node_admin_state TODO above).
def persistent_hostname?()
  false
end
-
-
### virtual column defs
-
#######################
-
# TODO: write as sql fn for efficiency
-
1
# True when the node's :action field reports one or more pending changes.
def has_pending_change()
  action = get_field?(:action) || {}
  pending_count = action[:count] || 0
  pending_count > 0
end
-
-
1
# Virtual column: 'staged' until the node is deployed, then its
# operational status.
def status()
  # assumes :is_deployed and :operational_status are set
  if self[:is_deployed]
    self[:operational_status]
  else
    Type::Node.staged
  end
end
-
-
1
# Virtual column: id of the target (datacenter) this node belongs to.
def target_id()
  get_field?(:datacenter_datacenter_id)
end
-
-
1
# Virtual column: the node's display name.
def name()
  get_field?(:display_name)
end
-
-
1
# Human-readable "node (name) with id (id)" string; opts[:capitalize]
# capitalizes the leading word.
def pp_name_and_id(opts={})
  prefix =
    if opts[:capitalize]
      'Node'
    else
      'node'
    end
  "#{prefix} (#{name()}) with id (#{id.to_s})"
end
-
-
########
-
1
# Stops the given node instances via command-and-control, then clears each
# node's cached host addresses.
def self.stop_instances(nodes)
  CommandAndControl.stop_instances(nodes)
  nodes.each do |node|
    node.attribute.clear_host_addresses()
  end
end
-
-
#######################
-
# standard get methods
-
1
def get_target(additional_columns = [])
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name] + additional_columns,
-
:filter => [:eq,:id,target_id()]
-
}
-
Target::Instance.get_obj(model_handle(:target_instance),sp_hash)
-
end
-
-
1
def get_target_iaas_type()
-
get_target().get_iaas_type()
-
end
-
-
1
def get_target_iaas_credentials()
-
# TODO: Haris - When we support multiple IAAS we will need to modify logic here
-
get_target().get_aws_compute_params()
-
end
-
-
1
def self.get_violations(id_handles)
-
get_objs_in_set(id_handles,{:cols => [:violations]}).map{|r|r[:violation]}
-
end
-
-
1
def get_project()
-
get_objects_col_from_sp_hash(:cols => [:project]).first
-
end
-
-
1
def self.get_ports(id_handles)
-
get_objs_in_set(id_handles,{:cols => [:ports]},{:keep_ref_cols => true}).map{|r|r[:port]}
-
end
-
-
1
def get_port_links()
-
self.class.get_port_links([id_handle()])
-
end
-
-
1
def self.get_port_links(id_handles)
-
ret = Array.new
-
ports = get_ports(id_handles)
-
return ret if ports.empty?()
-
port_ids = ports.map{|p|p[:id]}
-
sp_hash = {
-
:cols => PortLink.common_columns(),
-
:filter => [:or, [:oneof, :input_id, port_ids], [:oneof, :output_id, port_ids]]
-
}
-
port_link_mh = ports.first.model_handle(:port_link)
-
Model.get_objs(port_link_mh,sp_hash)
-
end
-
-
# TODO: gui based may remove
-
1
def get_ports(*types)
-
port_list = self.class.get_ports([id_handle])
-
i18n = get_i18n_mappings_for_models(:component,:attribute)
-
port_list.map{|port|port.filter_and_process!(i18n,*types)}.compact
-
end
-
-
######### Model apis
-
-
1
def get_assembly?(cols=nil)
-
if assembly_id = get_field?(:assembly_id)
-
sp_hash = {
-
:cols => cols||[:id,:group_id,:display_name],
-
:filter => [:eq,:id,assembly_id]
-
}
-
Assembly::Instance.get_objs(model_handle(:assembly_instance),sp_hash).first
-
end
-
end
-
-
1
def self.list(model_handle,opts={})
-
target_filter = (opts[:target_idh] ? [:eq,:datacenter_datacenter_id,opts[:target_idh].get_id()] : [:neq,:datacenter_datacenter_id,nil])
-
filter = [:and, [:oneof, :type, [Type::Node.instance,Type::Node.staged,Type::Node.physical]], target_filter]
-
sp_hash = {
-
:cols => common_columns() + [:assemblies],
-
:filter => filter
-
}
-
cols_except_name = common_columns() - [:display_name]
-
get_objs(model_handle,sp_hash).map do |n|
-
el = n.hash_subset(*cols_except_name)
-
assembly_name = (n[:assembly]||{})[:display_name]
-
el.merge(:display_name => user_friendly_name(n[:display_name],assembly_name))
-
end.sort{|a,b|a[:display_name] <=> b[:display_name]}
-
end
-
-
1
def self.list_wo_assembly_nodes(model_handle)
-
filter = [:and, [:oneof, :type, [Type::Node.instance,Type::Node.staged]], [:eq, :assembly_id, nil]]
-
sp_hash = {
-
:cols => common_columns() + [:assemblies],
-
:filter => filter
-
}
-
cols_except_name = common_columns() - [:display_name]
-
get_objs(model_handle,sp_hash).map do |n|
-
el = n.hash_subset(*cols_except_name)
-
assembly_name = (n[:assembly]||{})[:display_name]
-
el.merge(:display_name => user_friendly_name(n[:display_name],assembly_name))
-
end.sort{|a,b|a[:display_name] <=> b[:display_name]}
-
end
-
-
1
# Predicate: is display_name a legal node display name (letters, digits,
# underscore, colon, brackets, dot, dash)?
# Bug fix: the regex was anchored with ^...$, which match per-line in Ruby,
# so a name containing a newline (e.g. "ok\n!bad") passed validation.
# \A...\z anchor the whole string.
def self.legal_display_name?(display_name)
  display_name =~ LegalDisplayName
end
LegalDisplayName = /\A[a-zA-Z0-9_:\[\]\.-]+\z/
-
-
1
# Qualified display form of a node name: "<assembly>::<node>" when an
# assembly name is given, otherwise just the node name.
def self.user_friendly_name(node_name, assembly_name = nil)
  return node_name unless assembly_name
  "#{assembly_name}::#{node_name}"
end
-
1
private_class_method :user_friendly_name
-
-
# Splits a user-friendly node name of the form "<assembly>::<node>" into its
# parts. Returns [node_name, assembly_name]; assembly_name is nil when the
# name has no "::" separator. The first capture group is greedy, so
# "a::b::c" parses as assembly "a::b" and node "c".
# Raises ErrorNameInvalid when the node part fails legal_display_name?.
def self.parse_user_friendly_name(name)
  node_name = assembly_name = nil
  if name =~ Regexp.new("(^.+)#{AssemblyNodeNameSep}(.+$)")
    node_name,assembly_name = [$2,$1]
  else
    node_name = name
  end
  unless legal_display_name?(node_name)
    raise ErrorNameInvalid.new(node_name,:node)
  end
  [node_name,assembly_name]
end
-
1
private_class_method :parse_user_friendly_name
-
1
AssemblyNodeNameSep = '::'
-
-
1
def info(opts={})
-
ret = get_obj(:cols => InfoCols).hash_subset(*InfoCols)
-
opts[:print_form] ? info_print_form_processing!(ret) : ret
-
end
-
1
InfoCols = [:id,:display_name,:os_type,:type,:description,:status,:external_ref,:assembly_id]
-
-
1
# Normalizes info_hash in place for printing: when
# info_hash[:external_ref][:private_dns_name] is stored as a one-entry hash
# of form { <public dns> => <private dns> }, it is flattened to the private
# dns value. Returns info_hash.
def info_print_form_processing!(info_hash)
  external_ref = info_hash[:external_ref]
  if external_ref
    private_dns = external_ref[:private_dns_name]
    external_ref[:private_dns_name] = private_dns.values.first if private_dns.kind_of?(Hash)
  end
  info_hash
end
-
-
1
# Strips sensitive ssh credentials from node[:external_ref] in place
# (no-op when the node has no :external_ref).
def self.sanitize!(node)
  external_ref = node[:external_ref]
  external_ref.delete(:ssh_credentials) if external_ref
end
-
1
def sanitize!()
-
self.class.sanitize!(self)
-
end
-
-
1
def info_about(about,opts={})
-
case about
-
when :components
-
get_objs(:cols => [:components],:keep_ref_cols => true).map do |r|
-
r[:component].convert_to_print_form!()
-
end.sort{|a,b|a[:display_name] <=> b[:display_name]}
-
when :attributes
-
get_attributes_print_form()
-
else
-
raise Error.new("TODO: not implemented yet: processing of info_about(#{about})")
-
end
-
end
-
-
1
def find_violations()
-
cmps = get_objs(:cols => [:components],:keep_ref_cols => true)
-
-
ret = Array.new
-
return ret if cmps.empty?
-
-
cmps.each do |cmp|
-
sp_hash = {
-
:cols => [:id, :type, :component_id, :service_id],
-
:filter => [:eq, :id, cmp[:component][:module_branch_id]]
-
}
-
branch = Model.get_obj(model_handle(:module_branch),sp_hash)
-
-
sp_cmp_hash = {
-
:cols => [:id, :display_name, :dsl_parsed],
-
:filter => [:eq, :id, branch[:component_id]]
-
}
-
cmp_module = Model.get_obj(model_handle(:component_module),sp_cmp_hash)
-
-
# ret << NodeViolations::NodeComponentParsingError.new(cmp_module[:display_name], "Component") unless cmp_module[:dsl_parsed]
-
ret << NodeViolations::NodeComponentParsingError.new(cmp_module[:display_name], "Component") unless branch.dsl_parsed?()
-
end
-
-
ret
-
end
-
-
#TODO: move to getting rid of namespace arg and using aug component template
-
# component_template can be augmented and have keys with objects:
-
# :module_branch
-
# :component_module
-
# :namespace
-
# opts can have
-
# :namespace
-
# :component_title
-
# :idempotent
-
1
# Adds a component instance to this node by cloning component_template.
# component_template can be augmented with keys holding objects:
#   :module_branch, :component_module, :namespace
# opts:
#   :namespace       - namespace override (else taken from the template)
#   :component_title - title for components that support titles
#   :idempotent      - when set, return the existing matching component's idh
# Returns the IDHandle of the new (or, when :idempotent, existing) component.
# Raises ErrorUsage when the template belongs to a test module, or when a
# titled component with the same title already exists.
def add_component(component_template,opts={})
  component_title = opts[:component_title]
  namespace = opts[:namespace] || (component_template[:namespace] && component_template[:namespace][:display_name])

  component_template.update_with_clone_info!()

  # components from test modules may never be added to a node
  if module_branch = component_template[:module_branch]
    raise ErrorUsage.new("You are not allowed to add component '#{component_template[:display_name]}' that belongs to test-module.") if module_branch[:type].eql?('test_module')
  end

  override_attrs = {:locked_sha => component_template.get_current_sha!()}

  component_type = component_template.get_field?(:component_type)
  if matching_cmp = Component::Instance.get_matching?(id_handle(),component_type,component_title)
    if opts[:idempotent]
      return matching_cmp.id_handle()
    else
      if component_title
        # Just doing check here when there is a title, and not treating singletones
        # because there is later constraint that picks up the singleton components
        raise ErrorUsage.new("Component (#{matching_cmp.print_form()}) already exists")
      end
    end
  end

  if title_attr_name = check_and_ret_title_attribute_name?(component_template,component_title)
    # NOTE(review): this reassignment replaces the hash built above, dropping
    # :locked_sha for titled components — confirm whether it should merge instead
    override_attrs = {
      :ref => SQL::ColRef.cast(ComponentTitle.ref_with_title(component_type,component_title),:text),
      :display_name => SQL::ColRef.cast(ComponentTitle.display_name_with_title(component_type,component_title),:text)
    }
  end
  clone_opts = {:no_post_copy_hook => true, :ret_new_obj_with_cols => [:id,:display_name], :namespace => namespace}
  new_cmp = clone_into(component_template,override_attrs,clone_opts)
  new_cmp_idh = new_cmp.id_handle()
  if title_attr_name
    Component::Instance.set_title_attribute(new_cmp_idh,component_title,title_attr_name)
  end
  new_cmp_idh
end
-
-
1
def delete_component(component_idh)
-
# first check that component_idh belongs to this instance
-
sp_hash = {
-
:cols => [:id, :display_name],
-
:filter => [:and, [:eq, :id, component_idh.get_id()], [:eq, :node_node_id, id()]]
-
}
-
unless Model.get_obj(model_handle(:component),sp_hash)
-
raise ErrorIdInvalid.new(component_idh.get_id(),:component)
-
end
-
Model.delete_instance(component_idh)
-
end
-
-
1
def self.check_valid_id(model_handle,id,assembly_id=nil)
-
# filter does not include node group members
-
filter =
-
[:and,
-
[:eq, :id, id],
-
[:neq, :datacenter_datacenter_id, nil],
-
assembly_id && [:eq, :assembly_id, assembly_id]
-
].compact
-
opts = (assembly_id ? {:no_error_if_no_match => true} : {})
-
check_valid_id_helper(model_handle,id,filter,opts) ||
-
check_valid_id__node_member(model_handle,id,assembly_id)
-
end
-
1
def self.check_valid_id__node_member(model_handle,id,assembly_id)
-
assembly = NodeGroupRelation.get_node_member_assembly?(model_handle.createIDH(:id => id))
-
unless assembly and assembly.id == assembly_id
-
raise ErrorIdInvalid.new(id,pp_object_type())
-
end
-
id
-
end
-
1
private_class_method :check_valid_id__node_member
-
-
1
def self.name_to_id(model_handle,name,assembly_id=nil)
-
node_name, assembly_name = parse_user_friendly_name(name)
-
unless legal_display_name?(node_name)
-
raise ErrorNameInvalid.new(node_name,:node)
-
end
-
assembly_id ||= assembly_name && Assembly::Instance.name_to_id(model_handle.createMH(:component),assembly_name)
-
sp_hash = {
-
:cols => [:id,:assembly_id],
-
:filter => [:and,
-
[:eq, :display_name, node_name],
-
[:neq, :datacenter_datacenter_id, nil],
-
[:eq, :assembly_id, assembly_id]]
-
}
-
name_to_id_helper(model_handle,name,sp_hash)
-
end
-
-
1
def git_authorized?()
-
external_ref.hash()[:git_authorized]
-
end
-
1
def set_git_authorized(bool_val)
-
update_external_ref_field(:git_authorized,bool_val)
-
end
-
-
-
1
def get_and_update_status!()
-
# shortcut
-
if has_key?(:is_deployed)
-
return Type::Node.staged if not self[:is_deployed]
-
end
-
update_obj!(:is_deployed,:external_ref,:operational_status)
-
return Type::Node.staged if not self[:is_deployed]
-
get_and_update_operational_status!()
-
end
-
-
1
def get_and_update_operational_status!()
-
update_obj!(:external_ref,:operational_status)
-
op_status = CommandAndControl.get_node_operational_status(self)
-
if op_status
-
unless self[:operational_status] == op_status
-
update_operational_status!(op_status)
-
end
-
end
-
op_status || self[:operational_status]
-
end
-
-
-
# attribute on node
-
1
def update_operational_status!(op_status)
-
update(:operational_status => op_status.to_s)
-
self[:operational_status] = op_status.to_s
-
end
-
-
1
def update_admin_op_status!(op_status)
-
update(:admin_op_status => op_status.to_s)
-
self[:admin_op_status] = op_status.to_s
-
end
-
-
1
def update_agent_git_commit_id(agent_git_commit_id)
-
update(:agent_git_commit_id => agent_git_commit_id)
-
self[:agent_git_commit_id] = agent_git_commit_id
-
end
-
-
1
def get_external_ref()
-
get_field?(:external_ref)||{}
-
end
-
-
1
def get_admin_op_status()
-
get_field?(:admin_op_status)||{}
-
end
-
-
1
def get_iaas_type()
-
ret = get_external_ref()[:type]
-
ret && ret.to_sym
-
end
-
-
1
def instance_id()
-
get_external_ref()[:instance_id]
-
end
-
-
1
def pbuilderid()
-
self.class.pbuilderid(self)
-
end
-
1
def self.pbuilderid(node)
-
unless ret = CommandAndControl.pbuilderid(node)
-
raise Error.new("Node (#{node.get_field?(:display_name)}) with id (#{node.id.to_s}) does not have an #{PBuilderIDPrintName}")
-
end
-
ret
-
end
-
1
PBuilderIDPrintName = 'internal communication ID'
-
-
1
def persistent_dns()
-
get_hostname_external_ref()[:persistent_dns]
-
end
-
-
1
def elastic_ip()
-
get_hostname_external_ref()[:elastic_ip]
-
end
-
-
1
def get_hostname_external_ref()
-
get_field?(:hostname_external_ref)||{}
-
end
-
1
private :get_hostname_external_ref
-
-
-
# TODO: these may be depracted
-
1
def update_ordered_component_ids(order)
-
ordered_component_ids = "{ :order => [#{order.join(',')}] }"
-
update(:ordered_component_ids => ordered_component_ids)
-
self[:ordered_component_ids] = ordered_component_ids
-
end
-
-
1
def get_ordered_component_ids()
-
ordered_component_ids = self[:ordered_component_ids]
-
return Array.new unless ordered_component_ids
-
eval(ordered_component_ids)[:order]
-
end
-
# end of these may be depracted
-
-
1
def self.get_output_attrs_to_l4_input_ports(id_handles)
-
rows = get_objs_in_set(id_handles,{:cols => [:output_attrs_to_l4_input_ports]},{:keep_ref_cols => true})
-
return Hash.new if rows.empty?
-
# restructure so that get mapping from attribute_id to port
-
ret = Hash.new
-
rows.each do |row|
-
attr_id = row[:port_external_output][:external_attribute_id]
-
ret[attr_id] ||= Array.new
-
ret[attr_id] << row[:port_l4_input]
-
end
-
ret
-
end
-
-
1
def get_ui_info(datacenter)
-
datacenter_id_sym = datacenter[:id].to_s.to_sym
-
node_id_sym = self[:id].to_s.to_sym
-
# TODO: hack assumes that canm just take position from first node[:u1]
-
((datacenter[:ui]||{})[:items]||{})[node_id_sym] || (self[:ui]||{})[datacenter_id_sym] || (self[:ui]||{}).values.first
-
end
-
-
1
def update_ui_info!(ui,datacenter)
-
datacenter_id_sym = datacenter[:id].to_s.to_sym
-
node_id_sym = self[:id].to_s.to_sym
-
self[:ui] ||= Hash.new
-
self[:ui][datacenter_id_sym] = ui
-
end
-
-
1
def get_users()
-
node_user_list = get_objects_from_sp_hash(:columns => [:users])
-
user_list = Array.new
-
# TODO: just putting in username, not uid or gid
-
node_user_list.map do |u|
-
attr = u[:attribute]
-
val = attr[:value_asserted]||attr[:value_derived]
-
(val and attr[:display_name] == "username") ? {:id => attr[:id], :username => val, :avatar_filename => 'generic-user-male.png'} : nil
-
end.compact
-
end
-
-
1
def get_applications()
-
app_hash_list = get_objects_col_from_sp_hash({:columns => [:applications]},:component)
-
-
i18n = get_i18n_mappings_for_models(:component)
-
app_hash_list.map do |component|
-
name = component[:display_name]
-
cmp_i18n = i18n_string(i18n,:component,name)
-
component_el = {:id => component[:id], :name => name, :i18n => cmp_i18n}
-
component_icon_fn = ((component[:ui]||{})[:images]||{})[:tnail]
-
component_el.merge(component_icon_fn ? {:component_icon_filename => component_icon_fn} : {})
-
end
-
end
-
-
# Method will take already allocated elastic IP and assign it deploy node.
-
# Keep in mind this can only happen when node is 'running' state
-
1
def associate_elastic_ip?()
-
if persistent_hostname?
-
CommandAndControl.associate_elastic_ip(self)
-
end
-
end
-
-
1
def associate_persistent_dns?()
-
CommandAndControl.associate_persistent_dns?(self)
-
end
-
-
# Method will remove DNS information for node, this happens when we do not persistent
-
# DNS and by stopping node we do not need to keep DNS information
-
1
def strip_dns_info!()
-
update(:external_ref => self[:external_ref].merge(:dns_name => nil, :ec2_public_address => nil, :private_dns_name => nil ))
-
end
-
-
1
def get_node_service_checks()
-
return Array.new if get_objects_from_sp_hash(:columns => [:monitoring_agents]).empty?
-
-
# TODO: i18n treatment of service check names
-
get_objects_col_from_sp_hash({:columns => [:monitoring_items__node]},:monitoring_item)
-
end
-
1
def get_component_service_checks()
-
return Array.new if get_objects_from_sp_hash(:columns => [:monitoring_agents]).empty?
-
# TODO: i18n treatment of service check names
-
i18n = get_i18n_mappings_for_models(:component)
-
-
get_objects_from_sp_hash(:columns => [:monitoring_items__component]).map do |r|
-
cmp_name = r[:component][:display_name]
-
cmp_info = {:component_name => cmp_name,:component_i18n => i18n_string(i18n,:component,cmp_name) }
-
r[:monitoring_item].merge(cmp_info)
-
end
-
end
-
-
# returns external attribute links and port links
-
# returns [connected_links,dangling_links]
-
1
def self.get_external_connected_links(id_handles)
-
port_link_ret = get_conn_port_links(id_handles)
-
attr_link_ret = get_conn_external_attr_links(id_handles)
-
[port_link_ret[0]+attr_link_ret[0],port_link_ret[1]+attr_link_ret[1]]
-
end
-
-
# return ports links
-
# returns [connected_links,dangling_links]
-
1
def self.get_conn_port_links(id_handles,opts={})
-
ret = [Array.new,Array.new]
-
in_port_cols = [:id, :display_name, :input_port_links]
-
ndx_in_links = Hash.new
-
get_objs_in_set(id_handles,{:columns => in_port_cols}).each do |r|
-
link = r[:port_link]
-
ndx_in_links[link[:id]] = link
-
end
-
-
out_port_cols = [:id, :display_name, :output_port_links]
-
ndx_out_links = Hash.new
-
get_objs_in_set(id_handles,{:columns => out_port_cols}).each do |r|
-
link = r[:port_link]
-
ndx_out_links[link[:id]] = link
-
end
-
-
return ret if ndx_in_links.empty? and ndx_out_links.empty?
-
-
connected_links = (ndx_in_links.keys & ndx_out_links.keys).map{|id|ndx_in_links[id]}
-
-
dangling_links = (ndx_in_links.keys - ndx_out_links.keys).map{|id|ndx_in_links[id]}
-
dangling_links += (ndx_out_links.keys - ndx_in_links.keys).map{|id|ndx_out_links[id]}
-
[connected_links,dangling_links]
-
end
-
-
# return externally connected attribute links
-
# returns [connected_links,dangling_links]
-
1
def self.get_conn_external_attr_links(id_handles)
-
ret = [Array.new,Array.new]
-
-
ndx_in_links = get_objs_in_set(id_handles,:cols => [:id,:input_attribute_links_cmp]).inject({}) do |h,r|
-
link = r[:attribute_link]
-
link[:type] == "external" ? h.merge(link[:id] => link) : h
-
end
-
ndx_in_links = get_objs_in_set(id_handles,:cols => [:id,:input_attribute_links_node]).inject(ndx_in_links) do |h,r|
-
link = r[:attribute_link]
-
link[:type] == "external" ? h.merge(link[:id] => link) : h
-
end
-
-
ndx_out_links = get_objs_in_set(id_handles,:cols => [:id,:output_attribute_links_cmp]).inject({}) do |h,r|
-
link = r[:attribute_link]
-
link[:type] == "external" ? h.merge(link[:id] => link) : h
-
end
-
ndx_out_links = get_objs_in_set(id_handles,:cols => [:id,:output_attribute_links_node]).inject(ndx_out_links) do |h,r|
-
link = r[:attribute_link]
-
link[:type] == "external" ? h.merge(link[:id] => link) : h
-
end
-
-
return ret if ndx_in_links.empty? and ndx_out_links.empty?
-
-
connected_links = (ndx_in_links.keys & ndx_out_links.keys).map{|id|ndx_in_links[id]}
-
-
dangling_links = (ndx_in_links.keys - ndx_out_links.keys).map{|id|ndx_in_links[id]}
-
dangling_links += (ndx_out_links.keys - ndx_in_links.keys).map{|id|ndx_out_links[id]}
-
[connected_links,dangling_links]
-
end
-
-
# TODO: quick hack
-
1
def self.get_wspace_display(id_handle)
-
node_id = IDInfoTable.get_id_from_id_handle(id_handle)
-
node_mh = id_handle.createMH(:model_name => :node)
-
node = get_objects(node_mh,{:id => node_id}).first
-
-
component_mh = node_mh.createMH(:model_name => :component)
-
component_ds = get_objects_just_dataset(component_mh,{:node_node_id => node_id})
-
attr_where_clause = {:is_port => true}
-
# TODO: can prune what fields included
-
attr_fs = Model::FieldSet.default(:attribute).with_added_cols(:component_component_id)
-
attribute_mh = node_mh.createMH(:model_name => :attribute)
-
attribute_ds = get_objects_just_dataset(attribute_mh,attr_where_clause,FieldSet.opt(attr_fs))
-
components = component_ds.graph(:left_outer,attribute_ds,{:component_component_id => :id}).all
-
node.merge(:component => components)
-
end
-
#######################
-
-
# TODO: should this be more generic and centralized?
-
1
def get_objects_associated_components()
-
assocs = Model.get_objects(ModelHandle.new(@c,:assoc_node_component),:node_id => self[:id])
-
return [] if assocs.nil?
-
assocs.map{|assoc|Model.get_object(IDHandle[:c=>@c,:guid => assoc[:component_id]])}
-
end
-
-
1
def get_obj_with_common_cols()
-
common_cols = self.class.common_columns()
-
ret = get_objs(:cols => common_cols).first
-
ret.materialize!(common_cols)
-
end
-
-
end
-
end
-
-
1
module XYZ
-
1
class NodeInterface < Model
-
# set_relation_name(:node,:interface)
-
-
### object access functions
-
#######################
-
end
-
-
1
class NodeViolations
  # Violation reported when a component module's DSL files fail to parse.
  class NodeComponentParsingError < self
    # component - display name of the offending component/module
    # type      - kind of object, used in the description (e.g. "Component")
    def initialize(component, type)
      @component = component
      @type = type
    end

    # Violation category identifier.
    def type
      :parsing_error
    end

    # Human-readable description of the violation.
    def description
      "%s '%s' has syntax errors in DSL files." % [@type, @component]
    end
  end
end
-
end
-
-
-
-
2
module DTK; class Node
-
2
module Clone; module Mixin
-
1
def add_model_specific_override_attrs!(override_attrs,target_obj)
-
override_attrs[:type] ||= Type::Node.staged
-
override_attrs[:ref] ||= SQL::ColRef.concat("s-",:ref)
-
override_attrs[:display_name] ||= SQL::ColRef.concat{|o|["s-",:display_name,o.case{[[{:ref_num=> nil},""],o.concat("-",:ref_num)]}]}
-
end
-
-
1
def source_clone_info_opts()
-
{:ret_new_obj_with_cols => [:id,:external_ref]}
-
end
-
-
1
def clone_pre_copy_hook(clone_source_object,opts={})
-
if clone_source_object.model_handle[:model_name] == :component
-
clone_source_object.clone_pre_copy_hook_into_node(self,opts)
-
else
-
clone_source_object
-
end
-
end
-
-
1
def clone_post_copy_hook(clone_copy_output,opts={})
-
component = clone_copy_output.objects.first
-
ClonePostCopyHookComponent.new(self,component).process(opts)
-
end
-
-
1
private
-
1
class ClonePostCopyHookComponent
-
1
def initialize(node,component)
-
@node = node
-
@component = component
-
@relevant_nodes = get_relevant_nodes(node,component)
-
@relevant_node_ids = @relevant_nodes.map{|n|n.id()}
-
@existing_ports = ExistingPorts.new() # for caching ports that exist already or ones that
-
end
-
-
1
def process(opts={})
-
create_new_ports_and_links(opts)
-
-
unless opts[:donot_create_pending_changes]
-
parent_action_id_handle = @node.get_parent_id_handle()
-
StateChange.create_pending_change_item(:new_item => @component.id_handle(), :parent => parent_action_id_handle)
-
end
-
end
-
-
1
private
-
1
def get_relevant_nodes(node,component)
-
if assembly_id = node.get_field?(:assembly_id)
-
component.update(:assembly_id => assembly_id)
-
assembly_idh = @node.id_handle(:model_name => :assembly,:id => assembly_id)
-
Assembly::Instance.get_nodes([assembly_idh])
-
else
-
[node]
-
end
-
end
-
-
1
# Creates any new ports implied by the link defs relevant to @component and,
# unless opts[:donot_create_internal_links], auto-completes internal links on
# @node for link defs local to it.
# opts:
#   :donot_create_internal_links - skip LinkDef auto-completion
#   :outermost_ports             - array to which materialized external ports are appended
def create_new_ports_and_links(opts={})
  # get the link defs/component_ports associated with components on the node or for assembly, associated with an assembly node
  node_link_defs_info = get_relevant_link_def_info()

  return if node_link_defs_info.empty?()

  new_ports = create_new_ports(node_link_defs_info,opts)

  unless opts[:donot_create_internal_links]
    # collect link defs local to @node, attaching any port just created for them
    internal_node_link_defs_info = Array.new
    node_id = @node.id()
    node_link_defs_info.each do |r|
      if r[:id] == node_id
        link_def_id = r[:link_def_id]
        # bug fix: the find block used assignment (=) instead of comparison (==),
        # so it matched (and mutated) the first port unconditionally
        r[:port] ||= new_ports.find{|port|port[:link_def_id] == link_def_id}
        # bug fix: rows were never collected, so the auto-complete branch below
        # was dead code despite the comment above describing this step
        internal_node_link_defs_info << r
      end
    end
    unless internal_node_link_defs_info.empty?
      # TODO: AUTO-COMPLETE-LINKS: not sure if this is place to call auto complete
      LinkDef::AutoComplete.create_internal_links(@node,@component,internal_node_link_defs_info)
    end
  end

  if opts[:outermost_ports]
    opts[:outermost_ports] += materialize_ports!(new_ports)
  end
end
-
-
1
def get_relevant_link_def_info()
-
ret = Array.new
-
sp_hash = {
-
:cols => [:node_link_defs_info],
-
:filter => [:oneof, :id, @relevant_node_ids]
-
}
-
link_def_info_to_prune = Model.get_objs(@node.model_handle(),sp_hash)
-
return ret if link_def_info_to_prune.empty?
-
-
component_type = @component.get_field?(:component_type)
-
component_id = @component.id()
-
ndx_ret = Hash.new
-
# prune if duplicate from perspective of link_def_id and remote_component_type
-
link_def_info_to_prune.each do |r|
-
link_def = r[:link_def]
-
remote_component_type = (r[:link_def_link]||{})[:remote_component_type] #could be nil
-
ndx = "#{link_def[:id].to_s}--#{remote_component_type}"
-
unless ndx_ret[ndx]
-
if link_def[:component_component_id] == component_id
-
ndx_ret[ndx] = r.merge(:direction => "input")
-
elsif remote_component_type == component_type
-
ndx_ret[ndx] = r.merge(:direction => "output")
-
end
-
end
-
end
-
ndx_ret.values()
-
end
-
-
# This creates either ports on @component or ports connected by link def to @component
-
1
# This creates either ports on @component or ports connected by link def to @component.
# Returns the newly created port rows; :returning_sql_cols requests the columns
# (:link_def_id, :id, :display_name, :type, :connected) that callers read.
def create_new_ports(node_link_defs_info,opts={})
  # find info about any component/ports belonging to a relevant node that is connected by link def to @component
  ndx_cmps = get_relevant_components(node_link_defs_info).inject(Hash.new){|h,cmp|h.merge(cmp[:component_type] => cmp)}
  get_relevant_ports(ndx_cmps.values).each{|port|@existing_ports.add_port(port)}
  ndx_nodes = @relevant_nodes.inject(Hash.new){|h,n|h.merge(n[:id] => n)}

  create_rows = Array.new
  node_link_defs_info.each do |r|
    link_def = r[:link_def]
    possible_port = Port.ret_port_create_hash(link_def,@node,@component,:direction => r[:direction])
    # returns truthy only when the port was not already known
    if @existing_ports.add_if_does_not_exists?(possible_port)
      create_rows << possible_port
    end

    if r[:direction] == "input"
      remote_cmp_type = r[:link_def_link][:remote_component_type]
      # TODO: need to see if this needs enhancement to treat components that take titles
      if remote_cmp = ndx_cmps[remote_cmp_type]
        remote_node = ndx_nodes[remote_cmp[:node_node_id]]
        possible_port = Port.ret_port_create_hash(link_def,remote_node,remote_cmp,:direction => "output")
        if @existing_ports.add_if_does_not_exists?(possible_port)
          create_rows << possible_port
        end
      end
    end
  end
  create_opts = {:returning_sql_cols => [:link_def_id,:id,:display_name,:type,:connected]}
  port_mh = @node.child_model_handle(:port)
  # bug fix: create_opts was built but `opts` was passed, so the returning
  # columns were never requested; also dropped an unused `ret` local
  Model.create_from_rows(port_mh,create_rows,opts.merge(create_opts))
end
-
-
1
def get_relevant_components(node_link_defs_info)
-
ret = Array.new
-
ndx_remote_cmp_types = Hash.new
-
node_link_defs_info.each do |r|
-
if r[:direction] == "input"
-
cmp_type = r[:link_def_link][:remote_component_type]
-
ndx_remote_cmp_types[cmp_type] ||= true
-
end
-
end
-
return ret if ndx_remote_cmp_types.empty?()
-
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:node_node_id,:component_type],
-
:filter => [:and, [:oneof, :node_node_id, @relevant_node_ids],
-
[:oneof,:component_type,ndx_remote_cmp_types.keys()]]
-
}
-
cmp_mh = @node.child_model_handle(:component)
-
Model.get_objs(cmp_mh,sp_hash)
-
end
-
-
1
def get_relevant_ports(cmps)
-
ret = Array.new
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:node_node_id,:component_id],
-
:filter => [:and,[:oneof, :node_node_id, @relevant_node_ids],
-
[:oneof,:component_id,cmps.map{|cmp|cmp.id()}]]
-
}
-
port_mh = @node.child_model_handle(:port)
-
Model.get_objs(port_mh,sp_hash)
-
end
-
-
1
# Cache of ports that already exist (or have just been scheduled for
# creation), indexed by node id and then by port display name.
class ExistingPorts
  def initialize
    @ndx_ports = {}
  end

  # Records port in the cache; the first port registered for a given
  # node/display_name pair wins. Returns the cached port.
  def add_port(port)
    by_name = (@ndx_ports[port[:node_node_id]] ||= {})
    by_name[port[:display_name]] ||= port
  end

  # Adds port and returns it (truthy) only when it was not cached yet;
  # returns nil for an already-known port.
  def add_if_does_not_exists?(port)
    add_port(port) unless port_exists?(port)
  end

  private

  def port_exists?(port)
    node_ports = @ndx_ports[port[:node_node_id]] || {}
    !!node_ports[port[:display_name]]
  end
end
-
-
# TODO: may deprecate; used just for GUI
-
1
# TODO: may deprecate; used just for GUI
# Materializes the newly created external ports and sets each port's :name to
# its i18n form. Returns the materialized port objects ([] when none apply).
# Bug fixes: previously returned nil when there were external-type ports to
# look up but none qualified (callers do `opts[:outermost_ports] += ...`, which
# would raise on nil), and the map block returned the assigned name string
# instead of the port object.
def materialize_ports!(ports)
  return [] if ports.empty?
  # TODO: more efficient way to do this; instead include all needed columns in :returning_sql_cols above
  port_mh = @node.child_model_handle(:port)
  external_port_idhs = ports.map do |port_hash|
    port_mh.createIDH(:id => port_hash[:id]) if ["component_internal_external","component_external"].include?(port_hash[:type])
  end.compact
  return [] if external_port_idhs.empty?

  new_ports = Model.get_objs_in_set(external_port_idhs, {:cols => Port.common_columns})
  i18n = @node.get_i18n_mappings_for_models(:component,:attribute)
  new_ports.map do |port|
    port.materialize!(Port.common_columns)
    port[:name] = get_i18n_port_name(i18n,port)
    port
  end
end
-
end
-
end; end
-
end; end
-
2
module DTK; class Node
-
1
module DanglingLink
-
1
module Mixin
-
1
def update_dangling_links(filter={})
-
ret = Array.new
-
cmp_idhs = filter[:component_idhs]
-
dangling_links = cmp_idhs ? get_dangling_links__for_component(cmp_idhs) : get_dangling_links__for_node()
-
return ret if dangling_links.empty?
-
aug_dangling_links = dangling_links.map do |r|
-
r[:attribute_link].merge(r.hash_subset(:input_attribute,:other_input_link))
-
end
-
attr_mh = model_handle_with_auth_info(:attribute)
-
Attribute.update_and_propagate_attributes_for_delete_links(attr_mh,aug_dangling_links,:add_state_changes => true)
-
end
-
-
1
private
-
1
def get_dangling_links__for_node()
-
get_objs(:cols => [:dangling_input_links_from_components]) +
-
get_objs(:cols => [:dangling_input_links_from_nodes])
-
end
-
-
1
def get_dangling_links__for_component(cmp_idhs)
-
# TODO: more efficient way of doing this ratehr than using the same methods used for node; instead can
-
# index from component
-
cmp_ids = cmp_idhs.map{|idh|idh.get_id()}
-
ret = get_objs(:cols => [:dangling_input_links_from_components]).select{|r|cmp_ids.include?(r[:component][:id])}
-
port_link_ids = Component::Instance.get_port_links(cmp_idhs).map{|r|r[:id]}
-
unless port_link_ids.empty?
-
ret += get_objs(:cols => [:dangling_input_links_from_nodes]).select do |r|
-
port_link_ids.include?(r[:attribute_link][:port_link_id])
-
end
-
end
-
ret
-
end
-
end
-
end
-
end; end
-
2
module DTK; class Node
-
1
module Delete
-
1
module Mixin
-
# This wil be called only when self is non node group (i.e., top level node or target ref)
-
1
def destroy_and_delete(opts={})
-
if is_node_group?()
-
# TODO: support this; one way is to case on whether it has any members and if not
-
# allow it to be deleted; and if members indicate the syntax to delete an individual member"
-
raise ErrorUsage.new("Not supported: deleting a node group; its members can be deleted")
-
end
-
if is_target_ref?
-
destroy_and_delete__target_ref(opts)
-
else
-
destroy_and_delete__top_level_node(opts)
-
end
-
end
-
-
1
# Destroys the node's backing instance and resets it to staged state.
# Currently disabled pending DTK-1857: always raises ErrorUsage, and the code
# below the raise is the intended implementation kept as an unreachable stub.
def destroy_and_reset(target_idh)
  raise ErrorUsage.new("Command Not Supported") # bug fix: message typo ("Supperetd")
  # TODO: DTK-1857
  if is_node_group?() or is_target_ref?()
    raise ErrorUsage.new("destroy_and_reset_nodes not supported for service instances with node groups")
  end

  if CommandAndControl.destroy_node?(self,:reset => true)
    Model.delete_instance(target_ref.id_handle) if target_ref
    StateChange.create_pending_change_item(:new_item => id_handle(), :parent => target_idh)
  end
  update_agent_git_commit_id(nil)
  attribute.clear_host_addresses()
end
-
-
1
def delete_object(opts={})
-
if target_ref_idh = opts[:delete_target_ref]
-
Model.delete_instance(target_ref_idh)
-
end
-
-
update_dangling_links()
-
-
if is_target_ref?()
-
# This wil be a node group member; need to bump down is assocaited node groups cardinality
-
node_group_member = ServiceNodeGroup::NodeGroupMember.create_as(self)
-
node_group_member.bump_down_associated_node_group_cardinality()
-
end
-
-
if opts[:update_task_template]
-
unless assembly = opts[:assembly]
-
raise Error.new("If update_task_template is set, :assembly must be given as an option")
-
end
-
update_task_templates_when_deleted_node?(assembly)
-
end
-
Model.delete_instance(id_handle())
-
true
-
end
-
-
1
private
-
1
# Destroys/deletes a target-ref node. Not-deletable target refs are left
# alone (treated as success). Deletion only proceeds when this caller holds
# the last reference to the target ref (reference count < 2); with more
# references the call is a successful no-op.
def destroy_and_delete__target_ref(opts={})
  return true if is_target_ref?(:not_deletable => true)
  # check the reference count on the target ref; delete only when initiated by
  # the sole node group / top level node pointing to it
  if TargetRef.get_reference_count(self) < 2
    execute_destroy_and_delete(opts)
  else
    # other references remain; succeed without doing anything
    true
  end
end
-
-
1
# Destroys and deletes a top-level node; when the node points at a target ref
# that it alone references, the target ref row is deleted as well.
#
# @param opts [Hash] options forwarded to delete_object
# @return [Boolean, Object] result of execute_destroy_and_delete / delete_object
def destroy_and_delete__top_level_node(opts)
  # See if there are any target refs that point to this node:
  # - none: plain destroy and delete
  # - one:  also delete the target ref when its reference count is 1
  # Since this is not a node group, target_refs_info should not have size greater than 1.
  target_refs_info = TargetRef.get_linked_target_refs_info(self)
  if target_refs_info.empty?
    execute_destroy_and_delete(opts)
  elsif target_refs_info.size == 1
    target_ref_info = target_refs_info.first
    opts_delete = opts
    target_ref = target_ref_info.target_ref
    if target_ref and target_ref_info.ref_count == 1
      # This node holds the sole reference, so delete the target ref as well.
      # Bug fix: previously built with a non-destructive Hash#merge whose result
      # was discarded (and plain `opts` was passed on), so :delete_target_ref
      # never reached delete_object and the target ref row was never removed.
      opts_delete = opts.merge(:delete_target_ref => target_ref.id_handle())
    end
    execute_destroy_and_delete(opts_delete)
  else
    Log.error("Unexpected that (#{inspect}) is linked to more than 1 target refs")
    delete_object(opts)
  end
end
-
-
1
# Tears down the node's VM via CommandAndControl, then removes the model
# object on success.
#
# @param opts [Hash] options forwarded to delete_object
# @return [Boolean, Object] false if the destroy failed; otherwise delete_object's result
def execute_destroy_and_delete(opts={})
  destroyed = CommandAndControl.destroy_node?(self)
  destroyed ? delete_object(opts) : false
end
-
-
1
# For each component on this (deleted) node, updates the assembly's task
# templates to account for the removed component.
#
# @param assembly [Object] assembly whose task templates are updated
# @return [Array] per-component results of update_when_deleted_component?
def update_task_templates_when_deleted_node?(assembly)
  # TODO: could be more efficient with a Task::Template method that takes a node
  # and deletes all of the node's components in bulk.
  sp_hash = {
    # :only_one_per_node and :ref are included for info needed when getting the title
    :cols   => [:id, :display_name, :node_node_id, :only_one_per_node, :ref],
    :filter => [:eq, :node_node_id, id()]
  }
  node_components = Component::Instance.get_objs(model_handle(:component), sp_hash)
  node_components.map do |component|
    Task::Template::ConfigComponents.update_when_deleted_component?(assembly, self, component)
  end
end
-
end
-
end
-
end; end
-
1
module DTK
  class Node
    # Wraps a node's :external_ref field (IaaS-level metadata) in a small
    # value object, plus a mixin for node models to read/refresh that field.
    class ExternalRef
      module Mixin
        # Sets one key of the node's :external_ref hash field.
        def update_external_ref_field(ext_ref_field, val)
          update_hash_key(:external_ref, ext_ref_field, val)
        end

        # Drops any cached :external_ref value and re-fetches it.
        def refresh_external_ref!
          self.delete(:external_ref)
          get_field?(:external_ref)
        end

        # @return [ExternalRef] wrapper around this node's external_ref field
        def external_ref
          ExternalRef.new(self)
        end
      end

      # Underlying external_ref hash ({} when the node has none).
      attr_reader :hash

      # @param node [Object] node model responding to get_field?
      def initialize(node)
        @node = node
        @hash = @node.get_field?(:external_ref) || {}
      end

      # True when the external ref references the given target's image
      # (delegated to CommandAndControl).
      def references_image?(target)
        CommandAndControl.references_image?(target, hash())
      end
    end
  end
end
-
2
module DTK; class Node
  # Base class for node filters; subclasses implement filter_aux?(nodes),
  # returning the subset of nodes that pass the filter.
  class Filter
    # @return [Array] nodes accepted by this filter
    def filter(nodes)
      filter_aux?(nodes)
    end

    # @return [Boolean] whether the single node passes this filter
    def include?(node)
      not filter_aux?([node]).empty?
    end

    # Filter accepting exactly the nodes named by a list of id handles.
    class NodeList < self
      def initialize(node_idhs)
        @node_ids = node_idhs.map { |idh| idh.get_id() }
      end

      def filter_aux?(nodes)
        nodes.select { |node| @node_ids.include?(node[:id]) }
      end
    end
  end
end; end
-
1
module DTK
  class Node
    # Node instances: nodes bound to a datacenter/target (as opposed to
    # node templates or node groups).
    class Instance < self
      # Columns fetched when listing a node's components.
      def self.component_list_fields()
        [:id,:display_name,:group_id,:external_ref,:ordered_component_ids]
      end

      # Returns all node instances (rows whose datacenter id is non-nil).
      # opts[:cols] adds extra columns to the default [:id,:group_id,:display_name].
      def self.get(mh,opts={})
        sp_hash = {
          :cols => ([:id,:group_id,:display_name]+(opts[:cols]||[])).uniq,
          # instances are distinguished by being attached to a datacenter
          :filter => [:neq,:datacenter_datacenter_id,nil]
        }
        get_objs(mh,sp_hash)
      end

      # Produces "<display_name>-<index>" so the name does not collide with
      # existing instances whose names start with display_name; returns
      # display_name unchanged when there is no collision at all.
      #
      # NOTE(review): the index is computed by bumping a counter once per
      # existing "-N" suffix with N >= index; matches are not sorted, so this
      # looks heuristic and may not guarantee uniqueness in every case —
      # confirm before relying on it for strict uniqueness.
      def self.get_unique_instance_name(mh,display_name)
        display_name_regexp = Regexp.new("^#{display_name}")
        matches = get(mh,:cols=>[:display_name]).select{|r|r[:display_name] =~ display_name_regexp}
        if matches.empty?
          return display_name
        end
        index = 2
        matches.each do |r|
          instance_name = r[:display_name]
          if instance_name =~ /-([0-9]+$)/
            instance_index = $1.to_i
            if instance_index >= index
              index += 1
            end
          end
        end
        "#{display_name}-#{index.to_s}"
      end
    end
  end
end
-
# TODO: temp until move into meta directory
-
1
module DTK
-
1
class Node
  # Assembly/node attribute names that toggle DNS support,
  # arranged in precedence order.
  module DNS
    AttributeKeys =
      %w[
        dns_enabled
        dtk_dns_enabled
        r8_dns_enabled
      ]
  end
end
-
-
1
module NodeMetaClassMixin
-
1
def up()
-
1
ds_column_defs :ds_attributes, :ds_key, :data_source, :ds_source_obj_type
-
1
external_ref_column_defs()
-
1
virtual_column :name, :type => :varchar, :local_dependencies => [:display_name]
-
1
column :tags, :json
-
# TODO: may change types; by virtue of being in a library we know about the item; may need to distinguish between backed images versus barebones ones; also may only treat node constraints with search objects
-
1
column :type, :varchar, :size => 25, :default => "instance" # Possible values are Node::Type.types
-
1
column :os_type, :varchar, :size => 25
-
1
column :os_identifier, :varchar, :size => 50 #augments os_type to identify specifics about os. From os_identier given region one can find unique ami
-
1
column :architecture, :varchar, :size => 10 #e.g., 'i386'
-
# TBD: now in data-source-specific column :manifest, :varchar # e.g., rnp-chef-server-0816-ubuntu-910-x86_32
-
# TBD: experimenting with whether it is better to make these actual or virtual columns
-
1
column :image_size, :numeric, :size=>[8, 3] #in megs
-
-
# TODO: may replace is_deployed and operational_status with status
-
1
column :is_deployed, :boolean, :default => false
-
-
1
column :admin_op_status, :varchar, :size => 20, :default => 'pending'
-
1
column :operational_status, :varchar, :size => 20
-
1
column :managed, :boolean, :default => true
-
-
1
column :hostname_external_ref, :json
-
1
column :ordered_component_ids, :text
-
1
column :agent_git_commit_id, :text
-
-
1
virtual_column :status, :type => :varchar, :local_dependencies => [:is_deployed,:operational_status]
-
1
column :ui, :json
-
1
foreign_key :assembly_id, :component, FK_SET_NULL_OPT
-
1
foreign_key :node_binding_rs_id, :node_binding_ruleset, FK_SET_NULL_OPT
-
1
virtual_column :target_id, :type => ID_TYPES[:id], :local_dependencies => [:datacenter_datacenter_id]
-
1
virtual_column :parent_name, :possible_parents => [:library,:datacenter]
-
1
virtual_column :disk_size, :path => [:ds_attributes,:flavor,:disk] #in megs
-
# TODO how to have this conditionally "show up"
-
1
virtual_column :ec2_security_groups, :path => [:ds_attributes,:groups]
-
-
# can be null; points to the canonical member (a node template in the library) which is used by default when do node_group add_node
-
1
foreign_key :canonical_template_node_id, :node, FK_SET_NULL_OPT
-
-
1
virtual_column :canonical_template_node, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :node,
-
:alias => :template_node,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond => {:id => q(:node,:canonical_template_node_id)},
-
:cols => [:id,:group_id, :display_name]
-
}]
-
-
1
virtual_column :project, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :datacenter,
-
:join_type => :inner,
-
:join_cond => {:id => p(:node,:datacenter)},
-
:cols => [:id,:project_id]
-
},
-
{
-
:model_name => :project,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond => {:id => q(:datacenter,:project_id)},
-
:cols => [:id,:display_name,:type]
-
}]
-
-
1
virtual_column :library, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :library,
-
:join_type => :inner,
-
:join_cond => {:id => p(:node,:library)},
-
:cols => [:id,:display_name]
-
}]
-
-
1
virtual_column :linked_target_refs, :type=>:json, :hidden=>true,
-
:remote_dependencies=>
-
[{
-
:model_name=>:node_group_relation,
-
:join_type=>:left_outer,
-
:join_cond=>{:node_group_id => q(:node,:id)},
-
:cols=>[:id,:display_name,:node_id]
-
},
-
{
-
:model_name=>:node,
-
:alias => :target_ref,
-
:convert => true,
-
:join_type=>:left_outer,
-
:join_cond=>{:id => q(:node_group_relation,:node_id)},
-
:cols=>[:id,:display_name,:type]
-
}]
-
-
##### for connection to ports and port link
-
1
virtual_column :node_link_defs_info, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => [:id,:display_name,:component_type, :extended_base, :implementation_id, :node_node_id]
-
},
-
{
-
:model_name => :link_def,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => [:id,id(:component),:local_or_remote,:link_type,:has_external_link,:has_internal_link]
-
},
-
{
-
:model_name => :port,
-
:convert => true,
-
:join_type => :left_outer,
-
:join_cond=>{:link_def_id => q(:link_def,:id)},
-
:cols => [:id,:display_name,:type,:connected]
-
},
-
{
-
:model_name=>:link_def_link,
-
:convert => true,
-
:join_type=>:left_outer,
-
:join_cond=>{:link_def_id=>:link_def__id},
-
:cols=>[:id,:display_name,:remote_component_type,:position,:type]
-
}]
-
-
1
lambda__segment_port =
-
lambda{|port_cols,opts|
-
3
segment = {
-
:model_name => :port,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => port_cols
-
}
-
3
segment.merge!(opts) if (opts and not opts.empty?)
-
3
segment
-
}
-
-
1
lambda__segment_node_binding =
-
lambda{|nb_cols,opts|
-
1
segment = {
-
:model_name => :node_binding_ruleset,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:node,:node_binding_rs_id)},
-
:cols => nb_cols
-
}
-
1
segment.merge!(opts) if (opts and not opts.empty?)
-
1
segment
-
}
-
-
1
virtual_column :node_bindings, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[ lambda__segment_node_binding.call(NodeBindingRuleset.common_columns(),{}) ]
-
-
1
virtual_column :ports, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[lambda__segment_port.call([:id,:type,id(:node),:containing_port_id,:external_attribute_id,:direction,:location,:ref,:display_name,:name,:description],{})] #TODO: should we unify with Port.common_columns
-
1
virtual_column :ports_for_clone, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
lambda__segment_port.call(FactoryObject::CommonCols+[:type,:link_def_id,:direction,:component_type,:link_type],{}),
-
{
-
:model_name => :link_def,
-
:convert => true,
-
:join_type => :left_outer,
-
:join_cond=>{:id => q(:port,:link_def_id)},
-
:cols => [:id,:ancestor_id]
-
}]
-
-
1
virtual_column :output_attrs_to_l4_input_ports, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
lambda__segment_port.call([:id,id(:node),:containing_port_id,:external_attribute_id],{:alias => :port_external_output,:filter => [:eq,:type,"external"]}), #TODO: what about component_external
-
{
-
:model_name => :port_link,
-
:alias => :port_link_l4,
-
:join_type => :inner,
-
:join_cond=>{:output_id => q(:port_external_output,:containing_port_id)},
-
:cols => [:input_id]
-
},
-
{ :model_name => :port,
-
:alias => :port_l4_input,
-
:join_type => :inner,
-
:filter => [:eq,:type,"l4"],
-
:join_cond=>{:id => q(:port_link_l4,:input_id)},
-
:cols => [:id,id(:node)]
-
}]
-
-
1
input_port_links_def =
-
[{
-
:model_name => :port,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => [:id,:display_name,:type]
-
},
-
{
-
:model_name => :port_link,
-
:convert => true,
-
:join_cond=>{:input_id =>q(:port,:id)},
-
:join_type => :inner,
-
:cols => [:id,:input_id,:output_id]
-
}]
-
1
virtual_column :input_port_links, :type => :json, :hidden => true,
-
:remote_dependencies => input_port_links_def
-
-
1
virtual_column :input_port_link_info, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
input_port_links_def +
-
[{
-
:model_name => :port,
-
:alias => :attr_other_end,
-
:join_cond=>{:id =>q(:port_link,:output_id)},
-
:join_type => :inner,
-
:cols => [:id,:display_name,:type]
-
}]
-
-
1
output_port_links_def =
-
[{
-
:model_name => :port,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => [:id,:display_name,:type]
-
},
-
{
-
:model_name => :port_link,
-
:convert => true,
-
:join_cond=>{:output_id =>q(:port,:id)},
-
:join_type => :inner,
-
:cols => [:id,:input_id,:output_id]
-
}]
-
-
1
virtual_column :output_port_links, :type => :json, :hidden => true,
  :remote_dependencies => output_port_links_def
# NOTE(review): duplicate definition of :output_port_links below — by symmetry
# with :input_port_links / :input_port_link_info above, this second one was
# likely meant to be named :output_port_link_info. As written, one definition
# silently shadows the other. Confirm intent (and which definition callers
# currently get) before renaming.
virtual_column :output_port_links, :type => :json, :hidden => true,
  :remote_dependencies =>
    output_port_links_def +
    [{
       :model_name => :port,
       :alias => :attr_other_end,
       :join_cond=>{:id =>q(:port_link,:input_id)},
       :join_type => :inner,
       :cols => [:id,:display_name,:type]
     }]
-
-
1
node_attrs_on_node_def =
-
[{
-
:model_name => :attribute,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => [:id,:display_name]
-
}]
-
-
1
lambda__segment_component =
-
lambda{|cmp_cols|
-
{
-
:model_name => :component,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => cmp_cols
-
6
}
-
}
-
1
lambda__components_and_attrs =
-
lambda{|args|
-
3
cmp_cols = args[:cmp_cols]
-
3
attr_cols = args[:attr_cols]
-
3
[lambda__segment_component.call(cmp_cols),
-
{
-
:model_name => :attribute,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => attr_cols
-
}]
-
}
-
1
lambda__cmps_and_non_default_attr_candidates =
-
lambda{|args|
-
1
cmp_cols = args[:cmp_cols]
-
1
attr_cols = args[:attr_cols]
-
1
[lambda__segment_component.call(cmp_cols),
-
{
-
:model_name => :attribute,
-
:convert => true,
-
:alias => :non_default_attr_candidate,
-
:join_type => :left_outer,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => attr_cols
-
}]
-
}
-
1
virtual_column :component_ws_module_branches, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[lambda__segment_component.call([:id,:display_name,:module_branch_id]),
-
{
-
:model_name => :module_branch,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:module_branch_id)},
-
:filter => [:eq, :is_workspace, true],
-
:cols => [:id,:display_name,:type,:component_id]
-
}]
-
1
virtual_column :component_module_branches, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[lambda__segment_component.call([:id,:display_name,:module_branch_id]),
-
{
-
:model_name => :module_branch,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:component,:module_branch_id)},
-
:cols => [:id,:display_name,:type,:component_id]
-
}]
-
1
virtual_column :components_and_attrs, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
lambda__components_and_attrs.call(
-
:cmp_cols=>FactoryObject::CommonCols+[:component_type],
-
:attr_cols=>FactoryObject::CommonCols+[:attribute_value,:required])
-
-
1
virtual_column :cmps_and_non_default_attr_candidates, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
lambda__cmps_and_non_default_attr_candidates.call(
-
:cmp_cols=>FactoryObject::CommonCols+[:ancestor_id,:component_type,:only_one_per_node],
-
:attr_cols=>FactoryObject::CommonCols+[:is_instance_value,:attribute_value,:external_ref,:data_type,:tags])
-
-
1
virtual_column :input_attribute_links_cmp, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
lambda__components_and_attrs.call(:cmp_cols=>[:id,:display_name, :component_type, id(:node)],:attr_cols=>[:id,:display_name]) +
-
[
-
{
-
:model_name => :attribute_link,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:input_id => q(:attribute,:id)},
-
:cols => [:id,:display_name, :type, :input_id,:output_id]
-
}]
-
1
virtual_column :input_attribute_links_node, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
node_attrs_on_node_def +
-
[
-
{
-
:model_name => :attribute_link,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:input_id => q(:attribute,:id)},
-
:cols => [:id,:display_name, :type, :input_id,:output_id]
-
}]
-
1
virtual_column :output_attribute_links_cmp, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
lambda__components_and_attrs.call(:cmp_cols=>[:id,:display_name, :component_type, id(:node)],:attr_cols=>[:id,:display_name]) +
-
[
-
{
-
:model_name => :attribute_link,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:output_id => q(:attribute,:id)},
-
:cols => [:id,:display_name, :type, :input_id,:output_id]
-
}]
-
1
virtual_column :output_attribute_links_node, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
node_attrs_on_node_def +
-
[
-
{
-
:model_name => :attribute_link,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:output_id => q(:attribute,:id)},
-
:cols => [:id,:display_name, :type, :input_id,:output_id]
-
}]
-
-
# used when node is deleted to find and update dangling attribute links
-
1
for_dangling_links =
-
[{
-
:model_name => :attribute_link,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:output_id => q(:output_attribute,:id)},
-
:cols => [:id,:type,:input_id,:index_map,:port_link_id]
-
},
-
{
-
:model_name => :attribute,
-
:alias => :input_attribute,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:attribute_link,:input_id)},
-
:cols => [:id,:display_name,:value_derived]
-
},
-
{
-
:model_name => :attribute_link,
-
:alias => :other_input_link,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:input_id => q(:attribute_link,:input_id)},
-
:cols => [:id,:type,:input_id,:index_map,:port_link_id]
-
}]
-
1
virtual_column :dangling_input_links_from_components, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :component,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => [:id,:display_name, :component_type, id(:node)]
-
},
-
{
-
:model_name => :attribute,
-
:alias => :output_attribute,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols => [:id,:display_name]
-
}] + for_dangling_links
-
-
1
virtual_column :dangling_input_links_from_nodes, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :attribute,
-
:alias => :output_attribute,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols => [:id,:display_name]
-
}] + for_dangling_links
-
-
##### end of for connection to ports and port links
-
-
1
virtual_column :assemblies, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:alias => :assembly,
-
:convert => true,
-
:join_type => :left_outer,
-
:join_cond=>{:id =>:node__assembly_id},
-
:cols => [:id,:display_name,:group_id,:description]
-
}]
-
-
1
virtual_column :components, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id =>:node__id},
-
:cols => [:id,:display_name,:group_id,:description,:component_type,:version,:ref_num, :module_branch_id]
-
}]
-
-
1
virtual_column :components_with_namespace, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id =>:node__id},
-
:cols => [:id,:display_name,:group_id,:description,:component_type,:version,:ref_num, :module_branch_id]
-
},
-
{
-
:model_name => :module_branch,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id =>:component__module_branch_id},
-
:cols => [:id,:display_name,:component_id]
-
},
-
{
-
:model_name => :component_module,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id =>:module_branch__component_id},
-
:cols => [:id,:display_name,:namespace_id]
-
},
-
{
-
:model_name => :namespace,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id =>:component_module__namespace_id},
-
:cols => [:id,:display_name]
-
}]
-
-
1
virtual_column :component_list, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id =>:node__id},
-
:filter => [:eq, :assembly_id, nil],
-
:cols => Component::Instance.component_list_fields()
-
}]
-
1
virtual_column :node_centric_components, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id =>:node__id},
-
:filter => [:eq, :assembly_id, nil],
-
:cols => Component.pending_changes_cols()
-
}]
-
-
1
virtual_column :node_binding_ruleset, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :node_binding_ruleset,
-
:convert => true,
-
:join_type => :left_outer,
-
:join_cond=>{:id =>:node__node_binding_rs_id},
-
:cols => NodeBindingRuleset.common_columns()
-
}]
-
-
1
virtual_column :dns_enabled_on_assembly, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:alias => :assembly,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id =>:node__assembly_id},
-
:cols => [:id,:display_name,:group_id,:ref,:ref_num]
-
},
-
{
-
:model_name => :attribute,
-
:convert => true,
-
:alias => :dns_enabled_attribute,
-
:join_type => :left_outer,
-
:join_cond=>{:component_component_id =>:assembly__id},
-
:filter=>[:oneof,:display_name,Node::DNS::AttributeKeys],
-
:cols => [:id,:display_name,:group_id,:value_asserted,:value_derived]
-
}]
-
-
1
virtual_column :dns_enabled_on_node, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:alias => :assembly,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:id =>:node__assembly_id},
-
:cols => [:id,:display_name,:group_id,:ref,:ref_num]
-
},
-
{
-
:model_name => :attribute,
-
:convert => true,
-
:alias => :dns_enabled_attribute,
-
:join_type => :left_outer,
-
:join_cond=>{:node_node_id =>:node__id},
-
:filter=>[:oneof,:display_name,Node::DNS::AttributeKeys],
-
:cols => [:id,:display_name,:group_id,:value_asserted,:value_derived]
-
}]
-
-
1
virtual_column :cmps_for_clone_into_node, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id =>:node__id},
-
:filter => [:eq, :from_on_create_event, false],
-
:cols => [:id,:display_name,:dependencies, :extended_base, :component_type] #columns needed by finding dependencies
-
}]
-
-
1
virtual_column :has_pending_change, :type => :boolean, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :action,
-
# TODO: avoiding use of :node__node
-
:sequel_def => lambda{|ds|ds.where(:state => "pending").join(:component__component,{:id => :component_id}).group_and_count(:component__node_node_id)},
-
:join_type => :left_outer,
-
:join_cond=>{:node_node_id =>:node__id}
-
}]
-
-
-
1
virtual_column :violations, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :violation,
-
:join_type => :inner,
-
:convert => true,
-
:join_cond=>{:target_node_id => q(:node,:id)},
-
:cols=>[:id,:display_name,:severity,:description,:expression,:target_node_id,:updated_at]
-
}]
-
-
1
virtual_column :users, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:join_type => :inner,
-
:filter => [:and, [:eq, :basic_type, "user"]],
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols=>[:id,:node_node_id]
-
},
-
{
-
:model_name => :attribute,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols=>[:id,:component_component_id,:display_name,:value_asserted,:value_derived]
-
}
-
]
-
-
1
virtual_column :target, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[{
-
:model_name => :target,
-
:join_type => :inner,
-
:join_cond=>{:id => q(:node,:datacenter_datacenter_id)},
-
:cols=>[:id,:display_name,:iaas_properties]
-
}]
-
-
1
monitoring_items_cols_def = [:id,:display_name,:service_name,:condition_name,:condition_description,:enabled,:params,:attributes_to_monitor]
-
1
virtual_column :monitoring_items__node, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :monitoring_item,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols=> monitoring_items_cols_def
-
},
-
]
-
1
virtual_column :monitoring_items__component, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols=>[:id,:display_name]
-
},
-
{
-
:model_name => :monitoring_item,
-
:convert => true,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id => q(:component,:id)},
-
:cols=>monitoring_items_cols_def
-
},
-
]
-
-
# TODO: just for testing
-
1
application_basic_types = %w{application service database language extension}
-
# in the dock, 'applications' means something wider than basic_type == 'application'
-
1
virtual_column :applications, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:join_type => :inner,
-
:filter => [:and,[:oneof, :basic_type, application_basic_types]],
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols=>[:id,:node_node_id,:display_name,:ui]
-
}
-
]
-
1
virtual_column :monitoring_agents, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:join_type => :inner,
-
:filter => [:eq, :specific_type, "monitoring_agent"],
-
:join_cond=>{:node_node_id => q(:node,:id)},
-
:cols=>[:id,:node_node_id,:display_name]
-
}
-
]
-
-
1
virtual_column :deprecate_port_links, :type => :json, :hidden => true,
-
:remote_dependencies =>
-
[
-
{
-
:model_name => :component,
-
:join_type => :inner,
-
:join_cond=>{:node_node_id =>:node__id},
-
:cols => [:id,:display_name,:node_node_id]
-
},
-
{
-
:model_name => :attribute,
-
:join_type => :inner,
-
:join_cond=>{:component_component_id =>:component__id},
-
:cols => [:id,:display_name,:component_component_id]
-
},
-
{
-
:model_name => :attribute_link,
-
:join_cond=>{:input_id =>:attribute__id},
-
:cols => [:id,:type,:hidden,{:output_id => :other_end_output_id},:input_id,:node_node_id]
-
},
-
{
-
:model_name => :attribute_link,
-
:join_cond=>{:output_id =>:attribute__id},
-
:cols => [:id,:type,:hidden,{:input_id => :other_end_input_id},:output_id,:node_node_id]
-
}
-
]
-
-
1
foreign_key :data_source_id, :data_source, FK_SET_NULL_OPT
-
1
many_to_one :library, :datacenter, :project
-
1
one_to_many :attribute, :port, :attribute_link, :component, :component_ref, :node_interface, :address_access_point, :monitoring_item
-
-
1
set_submodel(:node_group)
-
end
-
end
-
end
-
# TODO: better unify with code in model/attribute special processing
-
1
module DTK
-
1
class Node
-
1
class NodeAttribute
-
1
r8_nested_require('node_attribute','mixin')
-
1
r8_nested_require('node_attribute','class_mixin')
-
1
r8_nested_require('node_attribute','cache')
-
1
r8_nested_require('node_attribute','default_value')
-
-
1
def initialize(node)
-
@node = node
-
end
-
-
1
def root_device_size()
-
ret_value?(:root_device_size)
-
end
-
-
1
# Node cardinality attribute value; falls back to CardinalityDefault unless
# opts[:no_default] is set, in which case nil may be returned.
def cardinality(opts={})
  value = ret_value?(:cardinality)
  return value if opts[:no_default]
  value || CardinalityDefault
end
-
1
CardinalityDefault = 1
-
-
1
# Puppet version attribute for the node, defaulting to the server-wide
# configured version (R8::Config[:puppet][:version]) when unset.
# With opts[:raise_error_if_invalid], validates the attribute value first
# (currently this only logs on failure; see raise_error_if_invalid_puppet_version).
def puppet_version(opts={})
  puppet_version = ret_value?(:puppet_version)
  if opts[:raise_error_if_invalid]
    raise_error_if_invalid_puppet_version(puppet_version)
  end
  puppet_version||R8::Config[:puppet][:version]
end
-
-
1
# Validates that the requested Puppet version corresponds to a known gem.
# Despite the name, this currently only logs: the hard failure was removed
# because the gem-existence check was producing false positives.
def raise_error_if_invalid_puppet_version(puppet_version)
  return if puppet_version.nil? or puppet_version.empty?
  return if RubyGemsChecker.gem_exists?('puppet', puppet_version)
  # TODO: raising was taken out because this check gives false positives:
  # raise ErrorUsage.new("Invalid Puppet version (#{puppet_version})")
  Log.error("RubyGemsChecker.gem_exists? test fails with Puppet version (#{puppet_version})")
end
-
-
1
# Nils out every entry of the node's derived host_addresses_ipv4 attribute and
# propagates the change, clearing stale IPs left over from a destroyed VM.
# No-op when the attribute is missing or all its entries are already nil.
def clear_host_addresses()
  if attr = @node.get_node_attribute?('host_addresses_ipv4',:cols=>[:id,:group_id,:value_derived])
    if host_addresses = attr[:value_derived]
      # only write when at least one non-nil address remains
      if host_addresses.find{|a|!a.nil?}
        cleared_vals = host_addresses.map{|a|nil}
        attr.merge!(:value_derived => cleared_vals)
        Attribute.update_and_propagate_attributes(attr.model_handle(),[attr])
      end
    end
  end
end
-
-
1
TargetRefAttributes = ['host_addresses_ipv4','name','fqdn','node_components','puppet_version','root_device_size']
-
1
TargetRefAttributeFilter = [:oneof,:display_name,TargetRefAttributes]
-
1
NodeTemplateAttributes = ['host_addresses_ipv4','node_components','fqdn']
-
4
AssemblyTemplateAttributeFilter = [:and] + NodeTemplateAttributes.map{|a|[:neq,:display_name,a]}
-
# TODO: FieldInfo and above should be normalized
-
# TODO: need to better coordinate with code in model/attribute special processing and also the
-
# constants in FieldInfo
-
1
FieldInfo = {
-
:name => {:name => :name},
-
:cardinality => {:name => :cardinality, :semantic_type => :integer},
-
:root_device_size => {:name => :root_device_size, :semantic_type => :integer},
-
:puppet_version => {:name => :puppet_version}
-
}
-
-
1
# Looks up FieldInfo metadata (:name, optional :semantic_type) for a node
# attribute name.
# @raise [Error] when the name is not a known node attribute
def self.field_info(name)
  unless ret = FieldInfo[name.to_sym]
    raise Error.new("No node attribute with name (#{name})")
  end
  ret
end
# Instance-side convenience for the class-level field_info lookup.
def field_info(name)
  self.class.field_info(name)
end
-
-
1
def self.target_ref_attributes_filter()
-
TargetRefAttributeFilter
-
end
-
1
def self.assembly_template_attribute_filter()
-
AssemblyTemplateAttributeFilter
-
end
-
-
-
# for each node, one of following actions is taken
-
# - if attribute does not exist, it is created with the given value
-
# - if attribute exists but has a value differing from 'value' then it is updated
-
# - otherwise no-op
-
1
# Creates or updates the node-level attribute `name` on each node:
# missing attributes are created with `value` (plus extra_fields); existing
# ones are updated when their value differs (or extra_fields is non-empty);
# otherwise no-op.
def self.create_or_set_attributes?(nodes,name,value,extra_fields={})

  node_idhs = nodes.map{|n|n.id_handle()}
  # existing attribute rows indexed by node id
  ndx_attrs = get_ndx_attributes(node_idhs,name)

  to_create_on_node = Array.new
  to_change_attrs = Array.new

  nodes.each do |node|
    if attr = ndx_attrs[node[:id]]
      existing_val = attr[:attribute_value]
      # For simplicity there is no check of whether extra_fields already match
      # when deciding if an update is needed: any non-empty extra_fields forces
      # an update.
      unless extra_fields.empty? and existing_val == value
        to_change_attrs << attr
      end
    else
      to_create_on_node << node
    end
  end
  to_change_attrs.each{|attr|attr.update(extra_fields.merge(:value_asserted => value))}

  unless to_create_on_node.empty?
    create_rows = to_create_on_node.map{|n|attribute_create_hash(n.id,name,value,extra_fields)}
    attr_mh = to_create_on_node.first.model_handle().create_childMH(:attribute)
    Model.create_from_rows(attr_mh,create_rows,:convert => true)
  end
end
-
-
1
# Bulk-loads the attribute `name` for the given nodes into each node's
# in-memory cache (Cache), querying only nodes not already cached.
# Nodes whose attribute row is missing or whose value is nil are left uncached.
def self.cache_attribute_values!(nodes,name)
  nodes_to_query = nodes.reject{|node|Cache.attr_is_set?(node,name)}
  return if nodes_to_query.empty?
  node_idhs = nodes_to_query.map{|n|n.id_handle()}
  ndx_attrs = get_ndx_attributes(node_idhs,name)

  field_info = field_info(name)
  nodes_to_query.each do |node|
    if attr = ndx_attrs[node[:id]]
      if val = attr[:attribute_value]
        Cache.set!(node,val,field_info)
      end
    end
  end
end
-
-
1
private
-
# attributes indexed by node id
-
1
# Fetches the node-level attribute named `name` for each node id handle and
# indexes the resulting rows by node id.
# @return [Hash] node_id => attribute row
def self.get_ndx_attributes(node_idhs,name)
  info = field_info(name)
  rows = Node.get_node_level_attributes(
    node_idhs,
    :cols       => [:id,:node_node_id,:attribute_value],
    :add_filter => [:eq,:display_name,info[:name].to_s]
  )
  rows.inject(Hash.new) { |ndx, row| ndx.merge(row[:node_node_id] => row) }
end
-
-
1
# Builds the row hash used to create a node-level attribute.
# @return [Hash] :ref/:display_name derived from name, merged with extra_fields
def self.attribute_create_hash(node_id,name,value,extra_fields={})
  name_s = name.to_s
  base = {
    :ref            => name_s,
    :display_name   => name_s,
    :value_asserted => value,
    :node_node_id   => node_id
  }
  base.merge(extra_fields)
end
-
-
1
# Returns the (semantically converted) value of the node attribute `name`,
# using the per-node cache; on a miss, reads the raw attribute value and
# caches it (Cache.set! converts per field_info and returns the stored value).
def ret_value?(name)
  field_info = field_info(name)
  if Cache.attr_is_set?(@node,name)
    Cache.get(@node,name)
  else
    raw_val = get_raw_value?(name)
    Cache.set!(@node,raw_val,field_info)
  end
end
-
-
1
# Raw (unconverted) attribute_value of node attribute `name`, or nil when
# the attribute row does not exist.
def get_raw_value?(name)
  row = @node.get_node_attribute?(name.to_s,:cols => [:id,:group_id,:attribute_value])
  row ? row[:attribute_value] : nil
end
-
end
-
end
-
end
-
2
module DTK; class Node
  class NodeAttribute
    # Per-node memoization of node-attribute values, stored directly on the
    # node object under the CacheKeyOnNode key.
    module Cache
      CacheKeyOnNode = :attribute_value_cache

      # True when a value (possibly nil) has been cached for this attribute.
      def self.attr_is_set?(node, name)
        cache = node[CacheKeyOnNode] || {}
        cache.has_key?(name.to_sym)
      end

      # Cached value for the attribute, or nil when absent.
      def self.get(node, name)
        cache = node[CacheKeyOnNode] || {}
        cache[name.to_sym]
      end

      # Converts raw_val per field_info[:semantic_type] (when both are present),
      # stores it in the node's cache, and returns the stored value.
      def self.set!(node, raw_val, field_info)
        key = field_info[:name].to_sym
        semantic_type = field_info[:semantic_type]
        stored =
          if raw_val and semantic_type
            Attribute::SemanticDatatype.convert_to_internal_form(semantic_type, raw_val)
          else
            raw_val
          end
        node[CacheKeyOnNode] ||= Hash.new
        node[CacheKeyOnNode][key] = stored
      end
    end
  end
end; end
-
2
module DTK; class Node
-
1
class NodeAttribute
-
1
module ClassMixin
-
1
def cache_attribute_values!(nodes,name)
-
NodeAttribute.cache_attribute_values!(nodes,name)
-
end
-
-
# target_ref_attributes are ones used on target refs and can also be on instances
-
1
def get_target_ref_attributes(node_idhs,opts={})
-
cols = opts[:cols] || [:id,:display_name,:node_node_id,:attribute_value,:data_type]
-
add_filter = NodeAttribute.target_ref_attributes_filter()
-
get_node_level_attributes(node_idhs,:cols=>cols,:add_filter=>add_filter)
-
end
-
-
# node_level_assembly_template_attributes are ones that are persisted in service modules
-
1
def get_node_level_assembly_template_attributes(node_idhs,opts={})
-
cols = opts[:cols] || [:id,:display_name,:node_node_id,:attribute_value,:data_type]
-
add_filter = NodeAttribute.assembly_template_attribute_filter()
-
get_node_level_attributes(node_idhs,:cols=>cols,:add_filter=>add_filter)
-
end
-
-
1
def get_node_level_attributes(node_idhs,opts={})
-
ret = Array.new
-
return ret if node_idhs.empty?()
-
filter = [:oneof,:node_node_id,node_idhs.map{|idh|idh.get_id()}]
-
if add_filter = opts[:add_filter]
-
filter = [:and,filter,add_filter]
-
end
-
cols = opts[:cols] || [:id,:group_id,:display_name,:required]
-
sp_hash = {
-
:cols => cols,
-
:filter => filter,
-
}
-
attr_mh = node_idhs.first.createMH(:attribute)
-
opts = (cols.include?(:ref) ? {:keep_ref_cols => true} : {})
-
get_objs(attr_mh,sp_hash,opts)
-
end
-
-
1
def get_virtual_attributes(attrs_to_get,cols,field_to_match=:display_name)
-
ret = Hash.new
-
# TODO: may be able to avoid this loop
-
attrs_to_get.each do |node_id,hash_value|
-
attr_info = hash_value[:attribute_info]
-
node = hash_value[:node]
-
attr_names = attr_info.map{|a|a[:attribute_name].to_s}
-
rows = node.get_virtual_attributes(attr_names,cols,field_to_match)
-
rows.each do |attr|
-
attr_name = attr[field_to_match]
-
ret[node_id] ||= Hash.new
-
ret[node_id][attr_name] = attr
-
end
-
end
-
ret
-
end
-
-
# TODO: need tp fix up below; maybe able to deprecate
-
1
def get_node_attribute_values(id_handle,opts={})
-
c = id_handle[:c]
-
node_obj = get_object(id_handle,opts)
-
raise Error.new("node associated with (#{id_handle}) not found") if node_obj.nil?
-
ret = node_obj.get_direct_attribute_values(:value) || {}
-
-
cmps = node_obj.get_objects_associated_components()
-
cmps.each{|cmp|
-
ret[:component]||= {}
-
cmp_ref = cmp.get_qualified_ref.to_sym
-
ret[:component][cmp_ref] =
-
cmp[:external_ref] ? {:external_ref => cmp[:external_ref]} : {}
-
values = cmp.get_direct_attribute_values(:value,{:attr_include => [:external_ref]})
-
ret[:component][cmp_ref][:attribute] = values if values
-
}
-
ret
-
end
-
end
-
end
-
end; end
-
2
module DTK; class Node
-
1
class NodeAttribute
-
1
module DefaultValue
-
1
def self.host_addresses_ipv4()
-
{
-
:required => false,
-
:read_only => true,
-
:is_port => true,
-
:cannot_change => false,
-
:data_type => 'json',
-
:value_derived => [nil],
-
:semantic_type_summary => 'host_address_ipv4',
-
:display_name =>"host_addresses_ipv4",
-
:dynamic =>true,
-
:hidden =>true,
-
:semantic_type =>{':array'=>'host_address_ipv4'}
-
}
-
end
-
-
1
def self.fqdn()
-
{
-
:required => false,
-
:read_only => true,
-
:is_port => true,
-
:cannot_change => false,
-
:data_type => 'string',
-
:display_name => 'fqdn',
-
:dynamic => true,
-
:hidden => true,
-
}
-
end
-
-
1
def self.node_components()
-
{
-
:required => false,
-
:read_only => true,
-
:is_port => true,
-
:cannot_change => false,
-
:data_type => 'json',
-
:display_name => 'node_components',
-
:dynamic => true,
-
:hidden => true,
-
}
-
end
-
end
-
end
-
end; end
-
2
module DTK; class Node
-
1
class NodeAttribute
-
1
module Mixin
-
1
def attribute()
-
NodeAttribute.new(self)
-
end
-
-
1
def get_node_attribute?(attribute_name,opts={})
-
get_node_attributes(opts.merge(:filter => [:eq,:display_name,attribute_name])).first
-
end
-
1
def get_node_attributes(opts={})
-
Node.get_node_level_attributes([id_handle()],:cols=>opts[:cols],:add_filter=>opts[:filter])
-
end
-
-
# TODO: stub; see if can use get_node_attributes
-
1
def get_node_attributes_stub()
-
Array.new
-
end
-
# TODO: once see calling contex, remove stub call
-
1
def get_node_and_component_attributes(opts={})
-
node_attrs = get_node_attributes_stub()
-
component_attrs = get_objs(:cols => [:components_and_attrs]).map{|r|r[:attribute]}
-
component_attrs + node_attrs
-
end
-
-
1
def set_attributes(av_pairs)
-
Attribute::Pattern::Node.set_attributes(self,av_pairs)
-
end
-
-
1
def get_attributes_print_form(opts={})
-
if filter = opts[:filter]
-
case filter
-
when :required_unset_attributes
-
get_attributes_print_form_aux(lambda{|a|a.required_unset_attribute?()})
-
else
-
raise Error.new("not treating filter (#{filter}) in Assembly::Instance#get_attributes_print_form")
-
end
-
else
-
get_attributes_print_form_aux()
-
end
-
end
-
-
1
def get_attributes_print_form_aux(filter_proc=nil)
-
node_attrs = get_node_attributes_stub()
-
component_attrs = get_objs(:cols => [:components_and_attrs]).map do |r|
-
attr = r[:attribute]
-
# TODO: more efficient to have sql query do filtering
-
if filter_proc.nil? or filter_proc.call(attr)
-
display_name_prefix = "#{r[:component].display_name_print_form()}/"
-
attr.print_form(Opts.new(:display_name_prefix => display_name_prefix))
-
end
-
end.compact
-
(component_attrs + node_attrs).sort{|a,b|a[:display_name] <=> b[:display_name]}
-
end
-
1
private :get_attributes_print_form_aux
-
-
1
def get_virtual_attribute(attribute_name,cols,field_to_match=:display_name)
-
sp_hash = {
-
:model_name => :attribute,
-
:filter => [:eq, field_to_match, attribute_name],
-
:cols => cols
-
}
-
get_children_from_sp_hash(:attribute,sp_hash).first
-
end
-
# TODO: may write above in terms of below
-
1
def get_virtual_attributes(attribute_names,cols,field_to_match=:display_name)
-
sp_hash = {
-
:model_name => :attribute,
-
:filter => [:oneof, field_to_match, attribute_names],
-
:cols => Aux.array_add?(cols,field_to_match)
-
}
-
get_children_from_sp_hash(:attribute,sp_hash)
-
end
-
-
# attribute on component on node
-
# assumption is that component cannot appear more than once on node
-
1
def get_virtual_component_attribute(cmp_assign,attr_assign,cols)
-
base_sp_hash = {
-
:model_name => :component,
-
:filter => [:and, [:eq, cmp_assign.keys.first,cmp_assign.values.first],[:eq, :node_node_id,self[:id]]],
-
:cols => [:id]
-
}
-
join_array =
-
[{
-
:model_name => :attribute,
-
:convert => true,
-
:join_type => :inner,
-
:filter => [:eq, attr_assign.keys.first,attr_assign.values.first],
-
:join_cond => {:component_component_id => :component__id},
-
:cols => cols.include?(:component_component_id) ? cols : cols + [:component_component_id]
-
}]
-
row = Model.get_objects_from_join_array(model_handle.createMH(:component),base_sp_hash,join_array).first
-
row && row[:attribute]
-
end
-
-
-
-
####Things below heer shoudl be cleaned up or deprecated
-
#####
-
# TODO: should be centralized
-
1
def get_contained_attribute_ids(opts={})
-
get_directly_contained_object_ids(:attribute)||[]
-
end
-
-
1
def get_direct_attribute_values(type,opts={})
-
parent_id = IDInfoTable.get_id_from_id_handle(id_handle)
-
attr_val_array = Model.get_objects(ModelHandle.new(@c,:attribute),nil,:parent_id => parent_id)
-
return nil if attr_val_array.nil?
-
return nil if attr_val_array.empty?
-
hash_values = {}
-
attr_type = {:asserted => :value_asserted, :derived => :value_derived, :value => :attribute_value}[type]
-
attr_val_array.each{|attr|
-
hash_values[attr.get_qualified_ref.to_sym] =
-
{:value => attr[attr_type],:id => attr[:id]}
-
}
-
{:attribute => hash_values}
-
end
-
-
################
-
# TODO: may be aqble to deprecate most or all of below
-
### helpers
-
1
def ds_attributes(attr_list)
-
[:ds_attributes]
-
end
-
# TODO: rename subobject to sub_object
-
1
def is_ds_subobject?(relation_type)
-
false
-
end
-
##########
-
-
1
private
-
1
def check_and_ret_title_attribute_name?(component_template,component_title)
-
title_attr_name = component_template.get_title_attribute_name?()
-
if component_title and title_attr_name.nil?
-
raise ErrorUsage.new("Component (#{component_template.component_type_print_form()}) is given a title, but should not have one")
-
elsif component_title.nil? and title_attr_name
-
cmp_name = component_template.component_type_print_form()
-
raise ErrorUsage.new("Component (#{cmp_name}) needs a title; use form #{cmp_name}[TITLE]")
-
end
-
title_attr_name
-
end
-
-
end
-
end
-
end; end
-
1
module DTK
-
1
class Node
-
# This refers to an object that is used to point to an existing node in a target; it is a peer of Node::Template
-
1
class TargetRef < self
-
1
r8_nested_require('target_ref','input')
-
1
r8_nested_require('target_ref','clone')
-
-
1
def is_target_ref?()
-
true
-
end
-
# handling case where node.class may be a parent of TargetRef, but represents one
-
1
def self.is_target_ref?(node)
-
types.include?(node.get_field?(:type))
-
end
-
-
# opts can have
-
# {:not_deletable => true}
-
1
def self.types(opts={})
-
if opts[:not_deletable]
-
TypesNotDeletable
-
else
-
Types
-
end
-
end
-
1
Types = [Type::Node.target_ref,Type::Node.target_ref_staged,Type::Node.physical]
-
1
TypesNotDeletable = [Type::Node.physical]
-
-
1
def self.assembly_node_print_form(target_ref)
-
target_ref.update_object!(:ref,:display_name)
-
unless name = target_ref[:display_name]||target_ref[:ref]
-
return 'NODE'
-
end
-
if name =~ Regexp.new("^#{physical_node_prefix()}(.+$)")
-
$1
-
else
-
name
-
end
-
end
-
-
1
def self.ret_display_name(type,target_ref_name,opts={})
-
case type
-
when :physical
-
"#{physical_node_prefix()}#{name}"
-
when :base_node_link
-
ret = target_ref_name
-
if index = opts[:index]
-
ret = "#{ret}#{IndexDelim}#{index.to_s}"
-
end
-
if assembly = opts[:assembly]
-
assembly_name = assembly.get_field?(:display_name)
-
ret = "#{assembly_name}#{AssemblyDelim}#{ret}"
-
end
-
ret
-
else
-
raise Error.new("Unexpected type (#{type})")
-
end
-
end
-
1
def self.node_member_index(target_ref)
-
if Type::Node.physical == target_ref.get_field?(:type)
-
return nil
-
end
-
ret = nil
-
if display_name = target_ref.get_field?(:display_name)
-
if display_name =~ Regexp.new("#{IndexDelim}([0-9]+$)")
-
ret = $1.to_i
-
end
-
end
-
unless ret
-
Log.error("Unexpected cannot find an index number")
-
end
-
ret
-
end
-
-
1
AssemblyDelim = '::'
-
1
IndexDelim = ':'
-
1
PhysicalNodePrefix = 'physical--'
-
1
def self.physical_node_prefix()
-
PhysicalNodePrefix
-
end
-
-
# returns hash of form {node_id => NodeWithTargetRefs,..}
-
1
NodeWithTargetRefs = Struct.new(:node,:target_refs)
-
1
def self.get_ndx_linked_target_refs(node_mh,node_ids)
-
ret = Hash.new
-
return ret if node_ids.empty?
-
sp_hash = {
-
:cols => [:id,:display_name,:type,:linked_target_refs],
-
:filter => [:oneof, :id, node_ids]
-
}
-
get_objs(node_mh,sp_hash).each do |n|
-
n.delete(:node_group_relation)
-
target_ref = n.delete(:target_ref)
-
pntr = ret[n[:id]] ||= NodeWithTargetRefs.new(n,Array.new)
-
pntr.target_refs << target_ref if target_ref
-
end
-
ret
-
end
-
-
# The class method get_nodes(target) gets the target refs
-
# opts keys:
-
# :managed
-
# :mark_free_nodes
-
# :cols
-
1
def self.get_nodes(target,opts={})
-
sp_hash = {
-
:cols => opts[:cols] || [:id, :display_name, :tags, :ref, :type, :assembly_id, :datacenter_datacenter_id, :managed],
-
:filter => [:and,
-
[:oneof, :type, [Type::Node.target_ref,Type::Node.physical]],
-
[:eq, :datacenter_datacenter_id, target[:id]],
-
opts[:managed] && [:eq, :managed, true]].compact
-
}
-
node_mh = target.model_handle(:node)
-
ret = get_objs(node_mh,sp_hash,:keep_ref_cols => true)
-
if opts[:mark_free_nodes]
-
ndx_matched_target_refs = ndx_target_refs_to_their_instances(ret.map{|r|r.id_handle})
-
unless ndx_matched_target_refs.empty?
-
ret.each do |r|
-
unless ndx_matched_target_refs[r[:id]]
-
r.merge!(:free_node => true)
-
end
-
end
-
end
-
end
-
ret
-
end
-
-
1
def self.get_target_running_nodes(target, opts = {})
-
active_nodes = Array.new()
-
sp_hash = {
-
:cols => opts[:cols] || [:id, :display_name, :tags, :ref, :type, :assembly_id, :datacenter_datacenter_id, :managed],
-
:filter => [:and,
-
# [:oneof, :type, [Type::Node.target_ref,Type::Node.physical]],
-
[:eq, :datacenter_datacenter_id, target[:id]],
-
opts[:managed] && [:eq, :managed, true]].compact
-
}
-
node_mh = target.model_handle(:node)
-
ret = get_objs(node_mh,sp_hash,:keep_ref_cols => true)
-
-
ret.each do |node|
-
op_status = node.get_admin_op_status()
-
if !node.is_node_group? && op_status.eql?('running')
-
active_nodes << node
-
end
-
end
-
-
active_nodes
-
end
-
-
# The class method get_free_nodes returns managed nodes without any assembly on them
-
1
def self.get_free_nodes(target)
-
ret = get_nodes(target,:mark_free_nodes=>true,:managed=>true)
-
ret.select{|r|r[:free_node]}
-
end
-
-
1
def self.list(target)
-
nodes = get_nodes(target, :cols => common_columns() + [:ref])
-
cols_except_name = common_columns() - [:display_name]
-
nodes.map do |n|
-
el = n.hash_subset(*cols_except_name)
-
#TODO: unify with the assembly print name
-
el.merge(:display_name => n[:display_name]||n[:ref])
-
end.sort{|a,b|a[:display_name] <=> b[:display_name]}
-
end
-
-
1
def self.create_nodes_from_inventory_data(target, inventory_data)
-
Input.create_nodes_from_inventory_data(target, inventory_data)
-
end
-
-
# returns hash of form {NodeInstanceId -> [target_refe_idh1,...],,}
-
# filter can be of form
-
# {:node_instance_idhs => [idh1,,]}, or
-
# {:node_group_relation_idhs => [idh1,,]}
-
1
def self.ndx_matching_target_ref_idhs(filter)
-
ret = Hash.new
-
filter_field = sample_idh = nil
-
if filter[:node_instance_idhs]
-
idhs = filter[:node_instance_idhs]
-
filter_field = :node_group_id
-
elsif filter[:node_group_relation_idhs]
-
idhs = filter[:node_group_relation_idhs]
-
filter_field = :id
-
else
-
raise Error.new("Unexpected filter: #{filter.inspect}")
-
end
-
if idhs.empty?
-
return ret
-
end
-
-
#node_group_id matches on instance side and node_id on target ref side
-
sp_hash = {
-
:cols => [:node_id,:node_group_id],
-
:filter => [:oneof,filter_field,idhs.map{|n|n.get_id}]
-
}
-
sample_idh = idhs.first
-
target_ref_mh = sample_idh.createMH(:node)
-
ngr_mh = sample_idh.createMH(:node_group_relation)
-
Model.get_objs(ngr_mh,sp_hash).each do |r|
-
node_id = r[:node_group_id]
-
(ret[node_id] ||= Array.new) << target_ref_mh.createIDH(:id => r[:node_id])
-
end
-
ret
-
end
-
-
1
def self.get_reference_count(target_ref)
-
sp_hash = {
-
:cols => [:id,:group_id],
-
:filter => [:eq, :node_id, target_ref.id]
-
}
-
ngr_mh = target_ref.model_handle(:node_group_relation)
-
Model.get_objs(ngr_mh,sp_hash).size
-
end
-
-
1
class Info
-
1
attr_reader :target_ref,:ref_count
-
1
def initialize(target_ref)
-
@target_ref = target_ref
-
@ref_count = 0
-
end
-
1
def increase_ref_count()
-
@ref_count +=1
-
end
-
end
-
# returns array of Info elements; should only be called on non target ref
-
1
def self.get_linked_target_refs_info(node_instance)
-
get_ndx_linked_target_refs_info([node_instance]).values.first||[]
-
end
-
-
1
private
-
1
def self.get_ndx_linked_target_refs_info(node_instances)
-
ret = Hash.new
-
if node_instances.empty?
-
return ret
-
end
-
sp_hash = {
-
:cols => [:node_group_id,:target_refs_with_links],
-
:filter => [:oneof,:node_group_id,node_instances.map{|n|n[:id]}]
-
}
-
ndx_ret = Hash.new
-
ngr_mh = node_instances.first.model_handle(:node_group_relation)
-
get_objs(ngr_mh,sp_hash).each do |r|
-
node_id = r[:node_group_id]
-
second_ndx = r[:target_ref].id
-
info = (ndx_ret[node_id] ||= Hash.new)[second_ndx] ||= Info.new(r[:target_ref])
-
info.increase_ref_count()
-
end
-
ndx_ret.inject(Hash.new){|h,(node_id,ndx_info)|h.merge(node_id => ndx_info.values)}
-
end
-
-
# returns hash of form {TargetRefId => [matching_node_instance1,,],}
-
1
def self.ndx_target_refs_to_their_instances(node_target_ref_idhs)
-
ret = Hash.new
-
return ret if node_target_ref_idhs.empty?
-
# object model structure that relates instance to target refs is where instance's :canonical_template_node_id field point to target_ref
-
sp_hash = {
-
:cols => [:id, :display_name,:canonical_template_node_id],
-
:filter => [:oneof,:canonical_template_node_id,node_target_ref_idhs.map{|idh|idh.get_id()}]
-
}
-
Log.error("see why this is using :canonical_template_node_id and not node_group_relation")
-
node_mh = node_target_ref_idhs.first.createMH()
-
get_objs(node_mh,sp_hash).each do |r|
-
(ret[r[:canonical_template_node_id]] ||= Array.new) << r
-
end
-
ret
-
end
-
end
-
end
-
end
-
2
module DTK; class Node
-
1
class TargetRef
-
# Clone has methods used when staging (cloning) taht involves target refs
-
1
class Clone
-
1
def initialize(target,assembly,nodes)
-
@target = target
-
@assembly = assembly
-
@nodes = nodes
-
end
-
# this creates needed target refs and their links to them
-
# there are a number of cases treated on a node by node basis (i.e., member of nodes)
-
# if node is a group then creating new target refs for it as function of its cardinality
-
# if node has been designated as matched to an existing target ref, need to create links to these
-
# otherwise returns a state change object in teh output array
-
1
def create_target_refs_and_links?()
-
tr_create = Array.new #node/node-groups that need target ref created
-
tr_link = Hash.new #node/node-groups that need to be linked to existing target refs
-
tr_link_candidates = Array.new
-
-
# ndx_needs_sc is used to find nodes that need a state change object
-
# meaning model is annoatted so these when a task is run will cause a node to be created
-
# initiallly set ndx_needs_state_change to have all nodes and then in loop below remove ones
-
# that are linked to existing nodes
-
ndx_needs_sc = Hash.new
-
@nodes.each do |node|
-
if node.is_node_group?() and !node[:target_refs_exist]
-
tr_create << node
-
else
-
tr_link_candidates << node
-
end
-
# initiallly set ndx_needs_state_change to have all nodes
-
ndx_needs_sc.merge!(node[:id] => node)
-
end
-
-
Input::BaseNodes.create_linked_target_refs?(@target,@assembly,tr_create)
-
-
to_link_array = existing_target_refs_to_link(tr_link_candidates,ndx_needs_sc)
-
link_to_target_refs(to_link_array)
-
-
# needed target_ref state changes
-
ndx_needs_sc.reject{|node,needs_sc|!needs_sc}.values
-
end
-
-
1
private
-
1
ToLinkElement = Struct.new(:node_instance_id,:target_ref)
-
# This method returns array of
-
# and also updates ndx_needs_sc
-
1
def existing_target_refs_to_link(tr_link_candidates,ndx_needs_sc)
-
ret = Array.new
-
return ret if tr_link_candidates.empty?
-
# See if nodes have target refs computed already; if so compute these
-
# TODO: convert so that always case target refs computed already
-
trs_that_need_processing = Array.new
-
tr_link_candidates.each do |node|
-
trs = node[:target_refs_to_link]||[]
-
unless trs.empty?
-
node_id = node[:id]
-
ret += trs.map{|target_ref|ToLinkElement.new(node_id,target_ref)}
-
else
-
trs_that_need_processing << node
-
end
-
end
-
-
return ret if trs_that_need_processing.empty?
-
-
# TODO: after 'convert so that always case' can remove below
-
ndx_node_template__node = trs_that_need_processing.inject(Hash.new) do |h,n|
-
n[:node_template_id] ? h.merge!(n[:node_template_id] => n[:id]) : h
-
end
-
unless ndx_node_template__node.empty?
-
sp_hash = {
-
:cols => [:id,:display_name,:type],
-
:filter => [:oneof,:id,ndx_node_template__node.keys]
-
}
-
Model.get_objs(@target.model_handle(:node),sp_hash).each do |nt|
-
if nt.is_target_ref?()
-
node_id = ndx_node_template__node[nt[:id]]
-
ret << ToLinkElement.new(node_id,nt)
-
ndx_needs_sc[node_id] = nil
-
end
-
end
-
end
-
ret
-
end
-
-
# This creates links between node instances and target refs
-
# to_link_array is array of ToLinkElements
-
1
def link_to_target_refs(to_link_array)
-
return if to_link_array.empty?
-
create_ngrs_objs_hash = to_link_array.inject(Hash.new) do |h,to_link_el|
-
h.merge(Input::BaseNodes.target_ref_link_hash(to_link_el.node_instance_id,to_link_el.target_ref.id))
-
end
-
create_objs_hash = {:node_group_relation => create_ngrs_objs_hash}
-
Model.input_hash_content_into_model(@target.id_handle(),create_objs_hash)
-
end
-
end
-
end
-
end; end
-
# For populating target refs from different input sources
-
2
module DTK; class Node
-
1
class TargetRef
-
1
class Input < Array
-
1
r8_nested_require('input','inventory_data')
-
1
r8_nested_require('input','base_nodes')
-
-
1
def self.create_nodes_from_inventory_data(target, inventory_data)
-
inventory_data.create_nodes_from_inventory_data(target)
-
end
-
-
#TODO: collapse with application/utility/library_nodes - node_info
-
1
def self.child_objects(params={})
-
{
-
"attribute"=> {
-
"host_addresses_ipv4"=>{
-
"required"=>false,
-
"read_only"=>true,
-
"is_port"=>true,
-
"cannot_change"=>false,
-
"data_type"=>"json",
-
"value_derived"=>[params["host_address"]],
-
"semantic_type_summary"=>"host_address_ipv4",
-
"display_name"=>"host_addresses_ipv4",
-
"dynamic"=>true,
-
"hidden"=>true,
-
"semantic_type"=>{":array"=>"host_address_ipv4"}
-
},
-
"fqdn"=>{
-
"required"=>false,
-
"read_only"=>true,
-
"is_port"=>true,
-
"cannot_change"=>false,
-
"data_type"=>"string",
-
"display_name"=>"fqdn",
-
"dynamic"=>true,
-
"hidden"=>true,
-
},
-
"node_components"=>{
-
"required"=>false,
-
"read_only"=>true,
-
"is_port"=>true,
-
"cannot_change"=>false,
-
"data_type"=>"json",
-
"display_name"=>"node_components",
-
"dynamic"=>true,
-
"hidden"=>true,
-
}
-
},
-
"node_interface"=>{
-
"eth0"=>{"type"=>"ethernet", "display_name"=>"eth0"}
-
}
-
}
-
end
-
end
-
end
-
end; end
-
-
3
module DTK; class Node; class TargetRef
-
1
class Input
-
1
class BaseNodes < self
-
1
r8_nested_require('base_nodes','element')
-
-
#This creates if needed a new target ref, links node to it and moves the node's attributes to the target ref
-
1
def self.create_linked_target_ref?(target,node,assembly)
-
ndx_node_target_ref_array = create_linked_target_refs?(target,assembly,[node])
-
unless target_ref_array = ndx_node_target_ref_array[node[:id]]
-
raise Error.new("Unexpected that create_linked_target_ref does not return element matching node[:id]")
-
end
-
unless target_ref_array.size == 1
-
raise Error.new("Unexpected that ndx_node_target_ref_array.size not equal 1")
-
end
-
target_ref = target_ref_array.first.create_object()
-
# TODO: can be more efficienct and avoid calling below if create_linked_target_refs? finds as opposed to creates
-
# target refs
-
move_node_attributes_to_target_refs(target,[{:node_instance => node,:target_ref => target_ref}])
-
target_ref
-
end
-
-
# TODO: need better name for create_linked_target_ref? vs create_linked_target_refs?
-
# since different in what they do with node attributes
-
-
# This creates if needed target refs and links nodes to them
-
# returns new idhs indexed by node (id) they linked to
-
# or if they exist their idhs
-
# for any node that is node group, this copies the node group's attributes to the target refs
-
1
def self.create_linked_target_refs?(target,assembly,nodes,opts={})
-
ret = Hash.new
-
return ret if nodes.empty?
-
ndx_target_ref_idhs = TargetRef.ndx_matching_target_ref_idhs(:node_instance_idhs => nodes.map{|n|n.id_handle})
-
-
create_objs_hash = Hash.new
-
nodes.each do |node|
-
node_id = node[:id]
-
cardinality = opts[:new_cardinality]||node.attribute.cardinality
-
target_ref_idhs = ndx_target_ref_idhs[node_id]||[]
-
num_existing = target_ref_idhs.size
-
num_needed = cardinality - num_existing
-
if num_needed > 0
-
el = Element.new(:node => node,:num_needed => num_needed,:offset => num_existing+1)
-
el.add_target_ref_and_ngr!(create_objs_hash,target,assembly)
-
elsif num_needed == 0
-
if cardinality > 0
-
ret.merge!(node_id => target_ref_idhs)
-
end
-
else # num_needed < 0
-
Log.error("Unexpected that more target refs than needed")
-
ret.merge!(node_id => target_ref_idhs)
-
end
-
end
-
-
unless create_objs_hash.empty?
-
all_idhs = Model.input_hash_content_into_model(target.id_handle(),create_objs_hash,:return_idhs => true)
-
#all idhs have both nodes and node_group_rels
-
ngr_idhs = all_idhs.select{|idh|idh[:model_name] == :node_group_relation}
-
# copy from node group to target refs
-
copy_node_attributes?(target,nodes,ngr_idhs)
-
ret.merge!(TargetRef.ndx_matching_target_ref_idhs(:node_group_relation_idhs => ngr_idhs))
-
end
-
ret
-
end
-
-
1
private
-
# to_link_array is of form [{:node_instance => node,:target_ref => target_ref},..]
-
1
def self.move_node_attributes_to_target_refs(target,to_link_array)
-
return if to_link_array.empty?
-
sp_hash = {
-
:cols => [:id,:display_name,:node_node_id],
-
:filter => [:oneof,:node_node_id,to_link_array.map{|n|n[:node_instance].id()}]
-
}
-
attr_mh = target.model_handle(:attribute)
-
attrs = Model.get_objs(attr_mh,sp_hash)
-
return if attrs.empty?
-
to_link_hash = to_link_array.inject(Hash.new){|h,r|h.merge(r[:node_instance].id => r[:target_ref].id)}
-
rows_to_update = attrs.map do |r|
-
{:id => r[:id], :node_node_id => to_link_hash[r[:node_node_id]]}
-
end
-
Log.error("need to also update top.id_info since parent field is being updated")
-
Model.update_from_rows(attr_mh,rows_to_update)
-
end
-
# TODO: Step in fixing DTK-1739 is putting in this copy to possible replace above Not switching over yet
-
# in create_linked_target_ref? in master branch until make sure that this does not impact node groups
-
# to_link_array is of form [{:node_instance => node,:target_ref => target_ref},..]
-
1
def self.copy_node_attributes_to_target_refs(target,to_link_array)
-
return if to_link_array.empty?
-
cols = Model::FieldSet.all_real(:attribute).with_removed_cols(:id,:local_id).cols
-
sp_hash = {
-
:cols => cols,
-
:filter => [:oneof,:node_node_id,to_link_array.map{|n|n[:node_instance].id()}]
-
}
-
attr_mh = target.model_handle(:node).create_childMH(:attribute)
-
attrs = Model.get_objs(attr_mh,sp_hash,:keep_ref_cols => true)
-
return if attrs.empty?
-
to_link_hash = to_link_array.inject(Hash.new){|h,r|h.merge(r[:node_instance].id => r[:target_ref].id)}
-
-
create_rows = attrs.map do |a|
-
target_ref_id = to_link_hash[a[:node_node_id]]
-
el = Hash.new
-
# copy with some special processing
-
a.each do |k,v|
-
if k == :id
-
#dont copy
-
elsif k == :node_node_id
-
el.merge!(k => target_ref_id)
-
elsif v.nil?
-
#dont copy
-
else
-
el.merge!(k => v)
-
end
-
end
-
el
-
end
-
Model.create_from_rows(attr_mh,create_rows,:convert => true)
-
end
-
-
# copy node attributes from node group to target refs
-
1
def self.copy_node_attributes?(target,nodes,ngr_idhs)
-
node_groups = nodes.select{|n|n.is_node_group?()}
-
return if node_groups.empty?
-
-
ng_idhs = node_groups.map{|ng|ng.id_handle()}
-
ndx_ng_target_ref_attrs = Hash.new
-
ServiceNodeGroup.get_node_attributes_to_copy(ng_idhs).each do |ng_attr|
-
node_group_id = ng_attr.delete(:node_node_id)
-
-
target_ref_attr = Hash.new
-
ng_attr.each do |field,val|
-
if field == :type
-
target_ref_attr[field] = Node::Type.target_ref
-
else
-
#remove nil fields
-
target_ref_attr[field] = val unless val.nil?
-
end
-
end
-
-
(ndx_ng_target_ref_attrs[node_group_id] ||= Array.new) << target_ref_attr
-
end
-
-
sp_hash = {
-
:cols => [:node_group_id,:target_ref],
-
:filter => [:oneof,:id,ngr_idhs.map{|idh|idh.get_id()}]
-
}
-
ngr_mh = target.model_handle(:node_group_relation)
-
create_rows = Array.new
-
Model.get_objs(ngr_mh,sp_hash).each do |ngr|
-
node_group_id = ngr[:node_group_id]
-
unless target_ref_attrs = ndx_ng_target_ref_attrs[ngr[:node_group_id]]
-
Log.error("Unexpected that node group id is not found in node_group_refs")
-
next
-
end
-
add_target_ref_attrs!(create_rows,ngr[:target_ref],target_ref_attrs)
-
end
-
attr_mh = node_groups.first.model_handle.create_childMH(:attribute)
-
ndx_create_rows = Hash.new
-
create_rows.each do |r|
-
ndx = r[:display_name]
-
(ndx_create_rows[ndx] ||= Array.new) << r
-
end
-
ndx_create_rows.values.each{|rows| Model.create_from_rows(attr_mh,rows,:convert => true)}
-
nil
-
end
-
-
1
def self.add_target_ref_attrs!(create_rows,target_ref,target_ref_attrs)
-
target_ref_id = target_ref.id
-
target_ref_attrs.each do |attr|
-
attr = attr.merge(:node_node_id => target_ref_id)
-
# any special processing for :value_asserted or :value_derived
-
case attr[:display_name]
-
when 'name'
-
# gsub is to strip off leading assembly name (if present)
-
attr[:value_asserted] = target_ref[:display_name].gsub(/^.+::/,'')
-
end
-
create_rows << attr
-
end
-
end
-
-
# node_instance and target_ref can be ids or be uri paths
-
1
def self.target_ref_link_hash(node_instance,target_ref)
-
hash = Link.attr_asignment(:node_group_id,node_instance).merge(Link.attr_asignment(:node_id,target_ref))
-
{Link.ref(node_instance,target_ref) => hash}
-
end
-
1
module Link
-
1
def self.attr_asignment(attr_name,val)
-
{(val.kind_of?(Fixnum) ? attr_name.to_s : "*#{attr_name}") => val}
-
end
-
1
def self.ref(node_instance,target_ref)
-
"#{target_ref.to_s}--#{node_instance.to_s}"
-
end
-
end
-
-
end
-
end
-
end; end; end
-
3
module DTK; class Node; class TargetRef
-
2
class Input; class BaseNodes
-
1
class Element
-
1
attr_reader :node,:num_needed
-
1
def initialize(node_info)
-
@node = node_info[:node]
-
@num_needed = node_info[:num_needed]
-
@offset = node_info[:offset]||1
-
@type = :base_node_link
-
end
-
-
1
def add_target_ref_and_ngr!(ret,target,assembly)
-
target_ref_hash = target_ref_hash(target,assembly)
-
unless target_ref_hash.empty?
-
(ret[:node] ||= Hash.new).merge!(target_ref_hash)
-
node_group_rel_hash = target_ref_hash.keys.inject(Hash.new) do |h,node_ref|
-
h.merge(BaseNodes.target_ref_link_hash(@node.id,"/node/#{node_ref}"))
-
end
-
(ret[:node_group_relation] ||= Hash.new).merge!(node_group_rel_hash)
-
end
-
ret
-
end
-
-
1
def target_ref_hash(target,assembly)
-
ret = Hash.new
-
unless display_name = @node.get_field?(:display_name)
-
raise Error.new("Unexpected that that node has no name field")
-
end
-
external_ref = @node.external_ref
-
(@offset...(@offset+@num_needed)).inject(Hash.new) do |h,index|
-
hash = {
-
:display_name => ret_display_name(display_name,:index => index),
-
:os_type => @node.get_field?(:os_type),
-
:type => Type::Node.target_ref_staged,
-
:external_ref => external_ref.hash()
-
}
-
ref = ret_ref(display_name,:index => index,:assembly => assembly)
-
h.merge(ref => hash)
-
end
-
end
-
-
1
private
-
1
def ret_display_name(name,opts={})
-
TargetRef.ret_display_name(@type,name,opts)
-
end
-
1
def ret_ref(name,opts={})
-
"#{@type}--#{ret_display_name(name,opts)}"
-
end
-
end
-
end; end
-
end; end; end
-
3
module DTK; class Node; class TargetRef
-
1
class Input
-
1
class InventoryData < self
-
1
r8_nested_require('inventory_data','element')
-
-
1
def initialize(inventory_data_hash)
-
super()
-
inventory_data_hash.each{|ref,hash| self << Element.new(ref,hash)}
-
end
-
-
1
def create_nodes_from_inventory_data(target)
-
target_ref_hash = target_ref_hash()
-
target_idh = target.id_handle()
-
Model.import_objects_from_hash(target_idh, {:node => target_ref_hash}, :return_info => true)
-
end
-
-
1
def self.pbuilderid?(node_external_ref)
-
node_external_ref ||= Hash.new
-
if host_address = node_external_ref[:routable_host_address]||node_external_ref['routable_host_address']
-
"#{TargetRef.physical_node_prefix()}#{host_address}"
-
end
-
end
-
-
1
private
-
1
def target_ref_hash()
-
inject(Hash.new){|h,el|h.merge(el.target_ref_hash())}
-
end
-
end
-
end
-
end; end; end
-
-
-
3
module DTK; class Node; class TargetRef
  class Input; class InventoryData
    # TODO: this is just temp until move from client formating data; right now hash is of form
    # {"physical--install-agent1"=>
    #   {"display_name"=>"install-agent1",
    #    "os_type"=>"ubuntu",
    #    "managed"=>"false",
    #    "external_ref"=>
    class Element < Hash
      # ref - string ref; must start with the physical-node prefix
      # hash - raw node hash (string keys) copied into this Element
      def initialize(ref, hash)
        super()
        if ref =~ Regexp.new("^#{TargetRef.physical_node_prefix()}")
          replace(hash)
          @type = :physical
        else
          raise Error.new("Unexpected ref for inventory data ref: #{ref}")
        end
      end

      # Converts this element into an importable {ref => node_hash} pair.
      # Raises Error when the element has no name or (for physical nodes)
      # no routable host address in its external_ref.
      def target_ref_hash()
        unless name = self['name'] || self['display_name']
          # Fix: error message previously read "Unexpected that that element"
          raise Error.new("Unexpected that element (#{inspect}) has no name field")
        end
        ret_hash = merge('display_name' => ret_display_name(name))

        external_ref = self['external_ref'] || {}
        ret_hash.merge!(:type => external_ref['type'] || Type::Node.target_ref)

        host_address = nil
        if @type == :physical
          unless host_address = external_ref['routable_host_address']
            raise Error.new("Missing field input_node_hash['external_ref']['routable_host_address']")
          end
        end
        params = { "host_address" => host_address }
        ret_hash.merge!(Input.child_objects(params))
        { ret_ref(name) => ret_hash }
      end

      private

      def ret_display_name(name)
        TargetRef.ret_display_name(@type, name)
      end

      def ret_ref(name)
        "#{@type}--#{name}"
      end
    end
  end; end
end; end; end
-
-
-
1
module DTK
  class Node
    # Node::Template represents image-backed node templates stored in the
    # public library together with their node binding rulesets.
    class Template < self
      r8_nested_require('template', 'factory')

      # Creates or updates a node template (delegated to Factory).
      def self.create_or_update_node_template(target, node_template_name, image_id, opts = {})
        Factory.create_or_update(target, node_template_name, image_id, opts)
      end

      # Deletes node images pointing at the given ruleset, then the ruleset itself.
      def self.delete_node_template(node_binding_ruleset)
        sp_hash = {
          :cols => [:id, :group_id, :display_name],
          :filter => [:eq, :node_binding_rs_id, node_binding_ruleset.id]
        }
        node_images = get_objs(node_binding_ruleset.model_handle(:node), sp_hash)
        unless node_images.size == 1
          # logged, not raised: deletion still proceeds over all matches
          Log.error("Unexpected that there are (#{node_images.size}) node images that match #{node_binding_ruleset.get_field?(:ref)}")
        end
        node_images.map { |n| delete_instance(n.id_handle()) }
        delete_instance(node_binding_ruleset.id_handle())
      end

      # Lists node templates. Three modes:
      # - opts[:target_id]    : bindings for a specific target
      # - opts[:is_list_all]  : bindings across all targets
      # - otherwise           : rulesets directly, optionally filtered by opts[:filter]
      def self.list(model_handle, opts = {})
        ret = Array.new
        node_bindings =
          if opts[:target_id]
            unique_node_bindings(model_handle, [:eq, :datacenter_datacenter_id, opts[:target_id].to_i])
          elsif opts[:is_list_all] and opts[:is_list_all].to_s == "true"
            unique_node_bindings(model_handle, [:neq, :datacenter_datacenter_id, nil])
          else
            sp_hash = { :cols => [:id, :ref, :display_name, :rules, :os_type] }
            sp_hash.merge!(:filter => opts[:filter]) if opts[:filter]
            get_objs(model_handle.createMH(:node_binding_ruleset), sp_hash, :keep_ref_cols => true)
          end

        node_bindings.each do |nb|
          # TODO: fix so that have a unique id for each
          unique_id = ((nb[:rules].size == 1) && nb[:id])
          nb[:rules].each do |r|
            # Amar & Haris: Skipping node template in case when target name filter is sent in method request from CLI
            next if (opts[:target_id] && r[:datacenter_datacenter_id] == opts[:target_id].to_i)
            el = {
              :display_name => nb[:display_name] || nb[:ref], # TODO: may just use display_name after fill in this column
              :os_type => nb[:os_type],
            }.merge(r[:node_template])
            el.merge!(:id => unique_id) if unique_id
            ret << el
          end
        end
        ret.sort_by { |el| el[:display_name] }
      end

      # Fetches node bindings via the node MH with the given filter and dedupes
      # them by ruleset id (fix: this logic was duplicated in both list branches).
      def self.unique_node_bindings(model_handle, filter)
        sp_hash = { :cols => [:node_bindings], :filter => filter }
        rows = get_objs(model_handle.createMH(:node), sp_hash)
        rows.inject({}) { |tmp, nb| tmp.merge(nb[:node_binding_rs_id] => nb[:node_binding_ruleset]) }.values
      end
      private_class_method :unique_node_bindings

      # e.g. "ec2_image" for an ec2 target.
      def self.image_type(target)
        "#{target.iaas_properties.type()}_image"
      end

      def self.get_public_library(model_handle)
        Library.get_public_library(model_handle.createMH(:library))
      end
      private_class_method :get_public_library

      # Distinct non-nil os identifiers over images in the public library.
      def self.legal_os_identifiers(model_handle)
        # Fix: removed dead public_library/sp_hash locals; get_images already
        # scopes to images in the public library.
        get_images(model_handle).map { |r| r[:os_identifier] }.compact.uniq
      end

      def self.get_images(model_handle)
        public_library = Library.get_public_library(model_handle.createMH(:library))
        sp_hash = {
          :cols => [:id, :group_id, :os_identifier, :external_ref],
          :filter => [:and, [:eq, :type, "image"], [:eq, :library_library_id, public_library[:id]]]
        }
        get_objs(model_handle.createMH(:node), sp_hash)
      end
      private_class_method :get_images

      # returns [image_id, os_type] for the first ruleset whose rules match the
      # target; nil when nothing matches.
      def self.find_image_id_and_os_type(os_identifier, target)
        opts_get = {
          :cols => [:id, :group_id, :rules, :os_type],
          :filter => [:eq, :os_identifier, os_identifier]
        }
        get_node_binding_rulesets(target, opts_get).each do |nb_rs|
          if matching_rule = CommandAndControl.find_matching_node_binding_rule(nb_rs[:rules], target)
            return [matching_rule[:node_template][:image_id], nb_rs[:os_type]]
          end
        end
        nil
      end

      # Returns rulesets that have a rule matching the target, each annotated
      # with :matching_rule.
      def self.get_matching_node_binding_rules(target, opts = {})
        ret = Array.new
        get_node_binding_rulesets(target, opts).each do |nb_rs|
          if matching_rule = CommandAndControl.find_matching_node_binding_rule(nb_rs[:rules], target)
            ret << nb_rs.merge(:matching_rule => matching_rule)
          end
        end
        ret
      end

      def self.get_node_binding_rulesets(target, opts = {})
        public_library = Library.get_public_library(target.model_handle(:library))
        filter = [:eq, :library_library_id, public_library.id()]
        filter = [:and, filter, opts[:filter]] if opts[:filter]
        sp_hash = {
          :cols => opts[:cols] || (NodeBindingRuleset.common_columns + [:ref]),
          :filter => filter
        }
        get_objs(target.model_handle(:node_binding_ruleset), sp_hash, :keep_ref_cols => true)
      end
      private_class_method :get_node_binding_rulesets

      # Distinct :size values from external refs of public-library images.
      def self.legal_memory_sizes(model_handle)
        public_library = Library.get_public_library(model_handle.createMH(:library))
        sp_hash = {
          :cols => [:id, :external_ref],
          :filter => [:and, [:eq, :type, "image"], [:eq, :library_library_id, public_library[:id]]]
        }
        get_objs(model_handle.createMH(:node), sp_hash).map do |r|
          if external_ref = r[:external_ref]
            external_ref[:size]
          end
        end.compact.uniq
      end

      # Finds the node template matching opts[:node_binding_ruleset] against the
      # target, falling back to the null node template.
      def self.find_matching_node_template(target, opts = {})
        if node_target = opts[:node_target]
          pp [:node_target, node_target] # debug output; path not implemented yet
          raise Error.new("here need to write code that uses node_target to return results")
        end

        node_binding_rs = opts[:node_binding_ruleset]
        ret = node_binding_rs && node_binding_rs.find_matching_node_template(target)
        ret || null_node_template(target.model_handle(:node))
      end

      def self.null_node_template(model_handle)
        sp_hash = {
          :cols => [:id, :group_id, :display_name],
          :filter => [:eq, :display_name, "null-node-template"]
        }
        get_obj(model_handle.createMH(:node), sp_hash)
      end
      private_class_method :null_node_template

      # Rewrites all references to old_image_id (in binding rules and image
      # nodes) to new_image_id. Raises ErrorUsage if nothing references it.
      def self.image_upgrade(model_handle, old_image_id, new_image_id)
        nb_mh = model_handle.createMH(:node_binding_ruleset)
        matching_node_bindings = get_objs(nb_mh, :cols => [:id, :rules]).select do |nb|
          nb[:rules].find { |r| r[:node_template][:image_id] == old_image_id }
        end
        if matching_node_bindings.empty?
          raise ErrorUsage.new("Cannot find reference to image_id (#{old_image_id})")
        end

        image_type = matching_node_bindings.first[:rules].first[:node_template][:type].to_sym

        # TODO: commented out below until can use new signature where pass in target to
        # get context, which includes image_type and if ec2 region
        # unless CommandAndControl.existing_image?(new_image_id,image_type)
        #  raise ErrorUsage.new("Image id (#{new_image_id}) does not exist")
        # end

        # update datastructure then model
        matching_node_bindings.each do |nb|
          nb[:rules].each do |r|
            nt = r[:node_template]
            nt[:image_id] = new_image_id if nt[:image_id] == old_image_id
          end
        end
        update_from_rows(nb_mh, matching_node_bindings)

        # find and update nodes that are images
        sp_hash = {
          :cols => [:id, :external_ref],
          :filter => [:eq, :type, "image"]
        }
        matching_images = get_objs(model_handle, sp_hash).select do |r|
          r[:external_ref][:image_id] == old_image_id
        end
        unless matching_images.empty?
          matching_images.each { |r| r[:external_ref][:image_id] = new_image_id }
          update_from_rows(model_handle, matching_images)
        end
      end
    end
  end
end
-
2
module DTK; class Node
  class Template
    # Factory builds the import hash for creating or updating a node template
    # (one node entry plus one node-binding-ruleset entry per requested size).
    class Factory < self
      def self.create_or_update(target, node_template_name, image_id, opts = {})
        raise_error_if_invalid_image(image_id, target)
        raise_error_if_invalid_os(opts[:operating_system])
        size_array = raise_error_if_invalid_size_array(opts[:size_array])

        hash_content = {
          :node => Hash.new,
          :node_binding_ruleset => Hash.new
        }
        size_array.each do |size|
          factory = new(target, node_template_name, image_id, size, opts)
          nbrs_factory = NodeBindingRuleset::Factory.new(factory)

          hash_content[:node_binding_ruleset].merge!(nbrs_factory.create_or_update_hash())
          hash_content[:node].merge!(factory.node_template(nbrs_factory))
        end

        public_library_idh = get_public_library(target.model_handle()).id_handle()
        Model.import_objects_from_hash(public_library_idh, hash_content)
      end

      attr_reader :target, :image_id, :os_identifier, :os_type, :size

      def initialize(target, os_identifier, image_id, size, opts = {})
        @target = target
        @image_id = image_id
        @os_identifier = os_identifier
        @os_type = opts[:operating_system]
        @size = size
      end

      # Returns {ref => node_template_hash} for import, wired to the ruleset
      # produced by nbrs_factory via the "*node_binding_rs_id" pointer.
      def node_template(nbrs_factory)
        hash_body = {
          :os_type => @os_type,
          :os_identifier => @os_identifier,
          :type => 'image',
          :display_name => node_template_display_name(),
          :external_ref => {
            :image_id => @image_id,
            :type => node_template_type(),
            :size => @size
          },
          :attribute => {
            'host_addresses_ipv4' => NodeAttribute::DefaultValue.host_addresses_ipv4(),
            'fqdn' => NodeAttribute::DefaultValue.fqdn(),
            'node_components' => NodeAttribute::DefaultValue.node_components()
          },
          :node_interface => { 'eth0' => { :type => 'ethernet', :display_name => 'eth0' } },
          "*node_binding_rs_id" => "/node_binding_ruleset/#{nbrs_factory.ref()}"
        }
        { node_template_ref() => hash_body }
      end

      private

      # NOTE: `private` does not affect `def self.` methods; kept for grouping.
      def self.raise_error_if_invalid_image(image_id, target)
        CommandAndControl.raise_error_if_invalid_image?(image_id, target)
        image_id
      end

      # Returns the normalized (symbol) os; raises ErrorUsage when missing/illegal.
      def self.raise_error_if_invalid_os(os)
        if os.nil?
          raise ErrorUsage.new("Operating system must be given")
        end
        os = os.to_sym
        unless LegalOSs.include?(os)
          raise ErrorUsage.new("OS parameter (#{os}) is invalid; legal values are: #{LegalOSs.join(',')}")
        end
        os
      end
      # TODO: sync with ../utils/internal/command_and_control/install_script.rb OSTemplates keys
      LegalOSs = [:ubuntu, :redhat, :centos, :debian]

      def self.raise_error_if_invalid_size_array(size_array)
        # Fix: the former `raise ... if size_array.nil?` after this default was
        # unreachable; the stub default guarantees a non-nil array.
        size_array ||= ['t1.micro'] # TODO: stub
        # size_array.each{|image_size|CommandAndControl.raise_error_if_invalid_image_size(image_size,target)}
        size_array
      end

      def node_template_ref()
        "#{@image_id}-#{@size}"
      end

      def node_template_display_name()
        "#{@os_identifier} #{@size}"
      end

      def node_template_type()
        Template.image_type(@target)
      end
    end
  end
end; end
-
2
module DTK; class Node
  # Type classifies node objects via their :type field; concrete taxonomies
  # live in Type::Node and Type::NodeGroup.
  class Type
    r8_nested_require('type', 'node')
    r8_nested_require('type', 'node_group')

    # Mixed into node model objects to interrogate their :type field.
    module Mixin
      def is_node?()
        Type::Node.isa?(get_field?(:type))
      end

      def is_node_group?()
        # short circuit on the concrete classes before consulting the type field
        return true if kind_of?(NodeGroup) or kind_of?(ServiceNodeGroup)
        Type::NodeGroup.isa?(get_field?(:type))
      end

      # Model name backing this node group; raises if not a node group.
      def node_group_model_name()
        raise Error.new("Should not be called if not a node group") unless is_node_group?()
        Type::NodeGroup.model_name(get_field?(:type))
      end
    end

    def self.types()
      Node.types() + NodeGroup.types()
    end

    def self.isa?(type)
      type && types().include?(type.to_sym)
    end

    # Maps a staged type to its concrete counterpart when a node is created;
    # falls back with a logged error rather than failing outright.
    def self.new_type_when_create_node(node)
      type = node.get_field?(:type)
      ret =
        if type == Node.staged
          Node.instance
        elsif type == Node.target_ref_staged
          Node.target_ref
        end
      unless ret
        Log.error("Unexpected type on node being created: #{type}")
        # best guess so does not completely fail
        ret = node.is_node_group? ? NodeGroup.instance : Node.instance
      end
      ret
    end
  end
end; end
-
2
module DTK; class Node
  class Type
    # Taxonomy of single-node types; each entry also becomes a class method
    # returning its string form (e.g. Type::Node.staged == 'staged').
    class Node < self
      Types = [
        :stub,              # - in an assembly template
        :image,             # - corresponds to an IAAS, hyperviser or container image
        :instance,          # - in a service instance where it correspond to an actual node
        :staged,            # - in a service instance before actual node correspond to it
        :target_ref,        # - target_ref to actual node
        :target_ref_staged, # - target_ref to node not created yet
        :physical,          # - target_ref that corresponds to a physical node
        :assembly_wide      # - assembly_wide node hidden from context
      ]

      # One accessor per type, returning the type as a string.
      Types.each do |type|
        define_singleton_method(type) { type.to_s }
      end

      def self.types()
        Types
      end
    end
  end
end; end
-
2
module DTK; class Node
  class Type
    # Taxonomy of node-group types; external (prefixed) form is
    # :node_group_<name>, and each name becomes a class method returning
    # its prefixed string form.
    class NodeGroup < self
      Types = [
        :stub,     # - in an assembly template
        :instance, # - in a service instance where actual nodes correspond to it
        :staged    # - in a service instance before actual nodes correspond to it
      ]

      # Prefixed symbol forms, memoized.
      def self.types()
        @types ||= Types.map { |t| type_from_name(t) }
      end

      # Model backing a given node-group type.
      def self.model_name(type)
        case type.to_sym
        when :node_group_stub, :node_group_staged then :service_node_group
        when :node_group_instance then :node_group
        else raise Error.new("Unexpected node group type (#{type})")
        end
      end

      private

      def self.type_from_name(type_name)
        "node_group_#{type_name}".to_sym
      end

      # One accessor per type name, returning the prefixed string form.
      Types.each do |type_name|
        define_singleton_method(type_name) { type_from_name(type_name).to_s }
      end
    end
  end
end; end
-
1
module DTK
  # A node binding ruleset maps target conditions to node templates via a
  # list of rules; presented to users as a "node template".
  class NodeBindingRuleset < Model
    r8_nested_require('node_binding_ruleset', 'factory')

    def self.common_columns()
      [:id, :display_name, :type, :os_type, :rules, :ref]
    end

    def self.check_valid_id(model_handle, id)
      check_valid_id_default(model_handle, id)
    end

    # Resolves a ruleset name (or all-digit id string) to an id.
    def self.name_to_id(model_handle, name)
      return name.to_i if name.match(/^[0-9]+$/)
      sp_hash = {
        :cols => [:id],
        :filter => [:eq, :ref, name]
      }
      name_to_id_helper(model_handle, name, sp_hash)
    end

    def self.object_type_string()
      "node template"
    end

    # Node template for the first rule matching the target, or nil/false.
    def find_matching_node_template(target)
      match = CommandAndControl.find_matching_node_binding_rule(get_field?(:rules), target)
      match && get_node_template(match[:node_template])
    end

    # Dispatches to clone or match depending on this ruleset's :type.
    def clone_or_match(target, opts = {})
      update_object!(:type, :rules, :ref)
      case self[:type]
      when "clone" then clone(target, opts)
      when "match" then match(target, opts)
      else
        raise Error.new("Unexpected type (#{self[:type]}) in node binding ruleset")
      end
    end

    # Summarizes RuleSetFields across all rules: a field maps to its common
    # value when every rule agrees, or :varies otherwise.
    def ret_common_fields_or_that_varies()
      summary = Hash.new
      return summary unless self[:rules]
      first_rule = true
      self[:rules].each do |rule|
        template = rule[:node_template]
        RuleSetFields.each do |field|
          if summary[field] == :varies
            # already known to differ; nothing to do
          elsif summary[field]
            summary[field] = :varies if summary[field] != template[field]
          elsif first_rule
            summary[field] = template[field]
          else
            summary[field] = :varies
          end
        end
        first_rule = false
      end
      summary
    end
    RuleSetFields = [:type, :image_id, :region, :size]

    private

    def match(target, opts = {})
      raise Error.new("TODO: not implemented yet")
    end

    # Clones the matching node template into the target, giving the clone a
    # unique display name; returns the new object's id handle (or nil).
    def clone(target, opts = {})
      node_template = find_matching_node_template(target)
      override_attrs = opts[:override_attrs] || Hash.new

      # special processing of :display_name
      display_name = override_attrs[:display_name] || get_field?(:ref)
      override_attrs.merge!(:display_name => Node::Instance.get_unique_instance_name(model_handle(:node), display_name))

      clone_opts = node_template.source_clone_info_opts()
      new_obj = target.clone_into(node_template, override_attrs, clone_opts)
      new_obj && new_obj.id_handle()
    end

    # Finds the image node attached to this ruleset whose external_ref image id
    # matches; raises Error when absent.
    def get_node_template(node_template_ref)
      sp_hash = {
        :cols => [:id, :display_name, :external_ref, :group_id],
        :filter => [:and, [:eq, :node_binding_rs_id, id()], [:eq, :type, "image"]]
      }
      found = Model.get_objs(id_handle.createMH(:node), sp_hash).find { |r| r[:external_ref][:image_id] == node_template_ref[:image_id] }
      raise Error.new("Cannot find associated node template") unless found
      found
    end
  end
end
-
-
2
module DTK; class NodeBindingRuleset
  # Factory builds create/update hashes for a node binding ruleset derived
  # from a Node::Template::Factory (the "top factory").
  class Factory
    def initialize(top_factory)
      @top_factory = top_factory
      @os_type = top_factory.os_type
      @os_identifier = top_factory.os_identifier
      @size = top_factory.size
    end

    # Returns the import hash; raises ErrorUsage when an existing ruleset
    # conflicts on os type or already covers this size.
    def create_or_update_hash()
      existing = matching_node_binding_ruleset?()
      return create_hash() unless existing

      unless @os_type == existing[:os_type]
        node_template_name = @top_factory.os_identifier
        raise ErrorUsage.new("Node template (#{node_template_name}) exists already and must have os type: #{existing[:os_type]}")
      end
      if existing.find_matching_node_template(@top_factory.target)
        node_template_name = @top_factory.os_identifier
        raise ErrorUsage.new("Node template (#{node_template_name}) with size #{@size} exists already")
      end
      create_hash(:existing_rules => existing[:rules])
    end

    # {ref => ruleset_body}; new rules are appended to any existing ones.
    def create_hash(opts = {})
      body = {
        :type => 'clone',
        :os_type => @os_type,
        :os_identifier => @os_identifier,
        :rules => (opts[:existing_rules] || []) + Rules.create(@top_factory)
      }
      { ref() => body }
    end

    def ref()
      # TODO: stub; may want normalized size form so abstracted from iaas
      "#{@os_identifier}-#{@size}"
    end

    private

    # Memoized lookup; @nbrs_calculated flag lets a nil result be cached too.
    def matching_node_binding_ruleset?()
      unless @nbrs_calculated
        @nbrs_calculated = true
        sp_hash = {
          :cols => NodeBindingRuleset.common_columns(),
          :filter => [:eq, :ref, ref()]
        }
        @matching_node_binding_ruleset = Model.get_obj(model_handle(), sp_hash)
      end
      @matching_node_binding_ruleset
    end

    def model_handle()
      @model_handle ||= @top_factory.target.model_handle(:node_binding_ruleset)
    end

    # Builds rule elements (conditions + node template) for the target's iaas.
    class Rules
      def self.create(top_factory)
        target = top_factory.target
        type = Node::Template.image_type(target)
        region = target.iaas_properties.hash[:region]
        [{
          :conditions => conditions(type, region),
          :node_template => node_template(top_factory, type, region)
        }]
      end

      def self.conditions(type, region)
        { :type => type, :region => region }
      end

      def self.node_template(top_factory, type, region)
        {
          :type => type,
          :region => region,
          :image_id => top_factory.image_id,
          :size => top_factory.size
        }
      end
    end
  end
end; end
-
-
-
1
module DTK
  # NodeBindings associates assembly-template nodes with node targets
  # (existing assembly nodes or image/size specs).
  class NodeBindings < Model
    r8_nested_require('node_bindings', 'content')
    r8_nested_require('node_bindings', 'parse_input')
    r8_nested_require('node_bindings', 'dsl')
    r8_nested_require('node_bindings', 'node_target')
    r8_nested_require('node_bindings', 'target_specific_info')

    def self.set_node_bindings(target, assembly, hash_content)
      create_from_hash(assembly, hash_content).set_node_bindings(target, assembly)
    end

    # Applies resolved image/size values as assembly node attributes.
    def set_node_bindings(target, assembly)
      # TODO: here or earlier check that bindings in this mention only logical nodes in the assembly
      content().find_target_specific_info(target).each_pair do |node, info|
        image_val = info.node_target_image?()
        assembly.set_attribute(assembly_node_attribute(:image, node), image_val, :create => true) if image_val
        size_val = info.size()
        assembly.set_attribute(assembly_node_attribute(:size, node), size_val, :create => true) if size_val
      end
    end

    # "<node>/<attr>" path for an assembly node attribute of the given type.
    def assembly_node_attribute(type, node)
      "#{node}/#{MappingToAssemblyAttr[type]}"
    end
    private :assembly_node_attribute
    MappingToAssemblyAttr = {
      :image => :os_identifier,
      :size => :memory_size
    }

    def self.get_node_bindings(assembly_template_idh)
      sp_hash = {
        :cols => [:id, :content],
        :filter => [:eq, :component_component_id, assembly_template_idh.get_id()]
      }
      get_obj(assembly_template_idh.createMH(:node_bindings), sp_hash)
    end

    def has_node_target?(assembly_node_name)
      content().has_node_target?(assembly_node_name)
    end

    # Creates a linked target ref when the node target resolves to an existing
    # node instance in the target.
    def self.create_linked_target_ref?(target, node, node_target)
      assembly_instance, node_instance = node_target && node_target.find_matching_instance_info(target, node)
      return unless node_instance
      Node::TargetRef::Input::BaseNodes.create_linked_target_ref?(target, node_instance, assembly_instance)
    end

    private

    # Lazily reifies the raw :content field into a Content object.
    def content()
      return self[:content] if self[:content].kind_of?(Content)
      if content_hash = get_field?(:content)
        self[:content] = Content.parse_and_reify(ParseInput.new(content_hash, :content_field => true))
      end
    end

    # since only one per assembly can use constant
    def self.node_bindings_ref(content)
      NodeBindingRef
    end
    NodeBindingRef = 'node_bindings_ref'
  end
end
-
1
module DTK
  class NodeBindings
    # Content maps assembly node names to their NodeTarget objects.
    class Content < Hash
      # Returns the node target for the name (truthy) or nil.
      def has_node_target?(assembly_node_name)
        self[assembly_node_name.to_sym]
      end

      # {assembly_node_name => TargetSpecificInfo} for the given target.
      def find_target_specific_info(target)
        inject(Hash.new) do |h, (assembly_node_name, node_target)|
          # Fix: previously node_target.find_target_specific_info(target) was
          # computed twice, discarding the first result.
          target_specific_info = node_target.find_target_specific_info(target)
          h.merge(assembly_node_name => target_specific_info)
        end
      end

      # Plain-hash serialization of all node targets.
      def hash_form()
        inject(Hash.new) do |h, (node_name, node_target)|
          h.merge(node_name => node_target.hash_form())
        end
      end

      # Parses {node => node_target_input} into a Content of reified
      # NodeTargets; nil for empty input, raises on non-Hash input.
      def self.parse_and_reify(parse_input)
        unless parse_input.type?(Hash)
          raise parse_input.error("Node Bindings section has an illegal form: ?input")
        end

        return nil if parse_input.input.empty?

        # TODO: check each node belongs to assembly
        parse_input.input.inject(new()) do |h, (node, node_target)|
          h.merge(node => NodeTarget.parse_and_reify(parse_input.child(node_target)))
        end
      end
    end
  end
end
-
1
module DTK
  class NodeBindings
    # DSL-facing entry points for parsing node-bindings sections.
    class DSL < self
      def self.create_from_hash(assembly, parse_input_hash)
        parsed_content = parse_content?(parse_input_hash)
        create_stub(assembly.model_handle(:node_bindings), :content => parsed_content)
      end

      # Extracts node-binding entries; legacy entries (plain strings with no
      # '/') are skipped. With opts[:remove_non_legacy], extracted entries are
      # also deleted from the input hash.
      def self.parse!(node_bindings_hash, opts = {})
        return nil unless node_bindings_hash
        delete_els = opts[:remove_non_legacy]
        parse_input_hash = Hash.new
        node_bindings_hash.each_pair do |node, node_target|
          legacy_form = node_target.kind_of?(String) && node_target !~ /\//
          next if legacy_form
          parse_input_hash[node] = (delete_els ? node_bindings_hash.delete(node) : node_target)
        end
        if content = parse_content?(parse_input_hash)
          { node_bindings_ref(content) => { :content => content.hash_form() } }
        end
      end

      private

      def self.parse_content?(parse_input_hash)
        Content.parse_and_reify(ParseInput.new(parse_input_hash))
      end
    end
  end
end
-
1
module DTK
  class NodeBindings
    # Base class for node-target flavors (AssemblyNode, Image).
    class NodeTarget
      r8_nested_require('node_target', 'assembly_node')
      r8_nested_require('node_target', 'image')

      attr_reader :type

      def initialize(type)
        @type = type
      end

      # Tries each concrete subclass parser in turn; raises a parsing error
      # when neither matches.
      def self.parse_and_reify(parse_input)
        AssemblyNode.parse_and_reify(parse_input, :donot_raise_error => true) ||
          Image.parse_and_reify(parse_input, :donot_raise_error => true) ||
          raise(parse_input.error("Node Target has illegal form: ?input"))
      end

      # Default policy: match an existing node (Image overrides with :create).
      def match_or_create_node?(target)
        :match
      end
    end
  end
end
-
2
module DTK; class NodeBindings
  class NodeTarget
    # NodeTarget flavor pointing at a node inside an existing assembly
    # instance ("assembly/<assembly_name>/<node_name>").
    class AssemblyNode < self
      Type = :assembly_node

      def initialize(hash)
        super(Type)
        @assembly_name = hash[:assembly_name]
        @assembly_name_internal_form = @assembly_name.gsub(/::/, '/')
        # TODO: encapsulate sepeartor between service mod and assembly in Assembly::Template
        @node_name = hash[:node_name]
      end

      def hash_form()
        { :type => type().to_s, :assembly_name => @assembly_name, :node_name => @node_name }
      end

      # Accepts either a reified content-field hash with matching :type, or a
      # string of the form "assembly/<name>/<node>"; nil otherwise.
      def self.parse_and_reify(parse_input, opts = {})
        input = parse_input.input
        if parse_input.type?(ContentField)
          return new(input) if input[:type].to_sym == Type
        elsif parse_input.type?(String)
          parts = input.split('/')
          if parts.size == 3 and input =~ /^assembly\//
            return new(:assembly_name => parts[1].gsub(/::/, '/'), :node_name => parts[2])
          end
        end
        nil
      end

      # returns if match [assembly_instance,node_instance]
      def find_matching_instance_info(target, stub_node)
        # see if in target there is an assembly that matches @assembly
        assembly_instances = find_matching_assembly_instances(target)
        if assembly_instances.size == 0
          Log.info('no node binding matches found')
          return nil
        elsif assembly_instances.size > 1
          Log.info('multiple node binding matches found')
          return nil
        end
        assembly_instance = assembly_instances.first
        node_instance = assembly_instance.get_nodes().find do |n|
          n.get_field?(:display_name) == @node_name
        end
        unless node_instance
          raise ErrorUsage.new("Assembly (#{assembly_instance[:display_name]}) does not have node (#{@node_name})")
        end
        unless node_instance.get_field?(:type) == 'instance'
          raise ErrorUsage.new("Assembly (#{assembly_instance[:display_name]}) node (#{@node_name}) cannot be matched because it is just staged")
        end
        [assembly_instance, node_instance]
      end

      private

      # Assembly instances in the target whose template name matches ours.
      def find_matching_assembly_instances(target)
        sp_hash = {
          :cols => [:id, :display_name, :instance_parent],
          :filter => [:eq, :datacenter_datacenter_id, target.id()]
        }
        Assembly::Instance.get_objs(target.model_handle(:assembly_instance), sp_hash).select do |r|
          if assembly_template = r[:assembly_template]
            @assembly_name_internal_form == Assembly::Template.pretty_print_name(assembly_template)
          end
        end
      end
    end
  end
end; end
-
2
module DTK; class NodeBindings
  class NodeTarget
    # NodeTarget flavor naming an image (and optionally a size) from which a
    # node should be created, rather than matching an existing node.
    class Image < self
      attr_reader :image

      def initialize(hash)
        super(Type)
        @image = hash[:image]
        @size = hash[:size]
      end

      # returns a TargetSpecificObject with image/size resolved against the
      # target's IAAS; raises ErrorUsage when a value cannot be resolved.
      def find_target_specific_info(target)
        info = TargetSpecificInfo.new(self)
        if @image
          image_id = NodeImage.find_iaas_match(target, @image)
          unless image_id
            raise ErrorUsage.new("The image (#{@image}) in the node binding does not exist in the target (#{target.get_field?(:display_name)})")
          end
          info.image_id = image_id
        end
        if @size
          iaas_size = NodeImageAttribute::Size.find_iaas_match(target, @size)
          unless iaas_size
            raise ErrorUsage.new("The size (#{@size}) in the node binding is not valid in the target (#{target.get_field?(:display_name)})")
          end
          info.size = iaas_size
        end
        info
      end

      def hash_form()
        { :type => type().to_s, :image => @image, :size => @size }
      end

      Type = :image
      Fields = {
        :image => {
          :key => 'image',
          :required => true
        },
        :size => {
          :key => 'size'
        }
      }
      # Derived lookup tables for parsing external (string-keyed) input.
      InputFormToInternal = Fields.inject(Hash.new) { |h, (k, v)| h.merge(v[:key] => k) }
      Allkeys = Fields.values.map { |f| f[:key] }
      RequiredKeys = Fields.values.select { |f| f[:required] }.map { |f| f[:key] }

      # Accepts either a reified content-field hash with matching :type, or a
      # raw hash with only the allowed keys and all required keys; nil otherwise.
      def self.parse_and_reify(parse_input, opts = {})
        input = parse_input.input
        if parse_input.type?(ContentField)
          return new(input) if input[:type].to_sym == Type
        elsif parse_input.type?(Hash)
          if Aux.has_only_these_keys?(input, Allkeys) and !RequiredKeys.find { |k| !input.has_key?(k) }
            internal_form = input.inject(Hash.new) { |h, (k, v)| h.merge(InputFormToInternal[k] => v) }
            return new(internal_form)
          end
        end
        nil
      end

      # A node must be created (not matched) for an image target.
      def match_or_create_node?(target)
        :create
      end
    end
  end
end; end
-
1
module DTK
  class NodeBindings
    # Wraps raw parse input, tracking whether it came from a stored :content
    # field (ContentField) or from fresh DSL input.
    class ParseInput
      attr_reader :input

      def initialize(input, opts = {})
        @input = opts[:content_field] ? ContentField.new(input) : input
      end

      # Child input inherits the content-field flavor of this input.
      def child(input)
        self.class.new(input, :content_field => @input.kind_of?(ContentField))
      end

      def type?(klass)
        @input.kind_of?(klass)
      end

      # Parsing error carrying the offending input for message substitution.
      def error(msg)
        input_param = ErrorUsage::Parsing::Params.new(:input => @input)
        ServiceModule::ParsingError.new(msg, input_param)
      end
    end

    # Marker Hash subclass flagging input that came from a stored content field.
    class ContentField < Hash
      def initialize(content_hash)
        super()
        replace(content_hash)
      end
    end
  end
end
-
-
1
module DTK
  class NodeBindings
    # Carries the target-resolved image/size values for one node binding.
    class TargetSpecificInfo
      attr_accessor :image_id, :size

      def initialize(node_target)
        @node_target = node_target
      end

      # The node target's image when it exposes one; false otherwise.
      def node_target_image?()
        return false unless @node_target.respond_to?(:image)
        @node_target.image
      end
    end
  end
end
-
# TODO: need to reconcile or have better names on this versus ServiceNodeGroup
-
1
module DTK
-
# This class represents objects that are group of nodes in a target that are grouped together
-
# they leave seperately from assemblies
-
1
class NodeGroup < Node
-
1
r8_nested_require('node_group','clone')
-
1
include Clone::Mixin
-
-
1
def self.get_component_info_for_action_list(nodes,opts={})
-
ret = opts[:add_on_to]||opts[:seed]||Array.new
-
# TODO: <update-target-node-groups>
-
# currently not using so short circuiting
-
return ret
-
-
return ret if nodes.empty?
-
# find node_to_ng mapping
-
node_filter = opts[:node_filter] || Node::Filter::NodeList.new(nodes.map{|n|n.id_handle()})
-
node_to_ng = get_node_groups_containing_nodes(nodes.first.model_handle(:node_group),node_filter)
-
node_group_ids = node_to_ng.values.map{|r|r.keys}.flatten.uniq
-
sp_hash = {
-
:cols => Node::Instance.component_list_fields() + [:component_list],
-
:filter => [:oneof, :id, node_group_ids + nodes.map{|n|n[:id]}]
-
}
-
rows = get_objs(nodes.first.model_handle(),sp_hash)
-
-
ndx_cmps = Hash.new
-
ndx_node_ng_info = Hash.new
-
rows.each do |r|
-
cmp = r[:component]
-
cmp_id = cmp[:id]
-
ndx_cmps[cmp_id] ||= cmp
-
pntr = ndx_node_ng_info[r[:id]] ||= {:node_or_ng => r.hash_subset(:id,:display_name)}
-
(pntr[:component_ids] ||= Array.new) << cmp_id
-
end
-
# add titles to components that are non singletons
-
Component::Instance.add_title_fields?(ndx_cmps.values)
-
-
nodes.each do |node|
-
# find components on the node group
-
(node_to_ng[node[:id]]||{}).each_key do |ng_id|
-
if node_ng_info = ndx_node_ng_info[ng_id]
-
node_ng_info[:component_ids].each do |cmp_id|
-
el = ndx_cmps[cmp_id].merge(
-
:node => node,
-
:source => {:type => "node_group", :object => node_ng_info[:node_or_ng]}
-
)
-
ret << el
-
end
-
end
-
end
-
-
# find components on the node
-
((ndx_node_ng_info[node[:id]]||{})[:component_ids]||[]).each do |cmp_id|
-
el = ndx_cmps[cmp_id].merge(
-
:node => node,
-
:source => {:type => "node", :object => node}
-
)
-
ret << el
-
end
-
end
-
-
ret
-
end
-
-
1
# Creates a node_group_instance row under the target; when opts[:spans_target]
# is set, also records the group as spanning the whole target.
def self.create_instance(target_idh, display_name, opts = {})
  row = {
    :ref => display_name,
    :display_name => display_name,
    :datacenter_datacenter_id => target_idh.get_id(),
    :type => "node_group_instance"
  }
  new_ng_idh = create_from_row(target_idh.create_childMH(:node), row)
  if opts[:spans_target]
    NodeGroupRelation.create_to_span_target?(new_ng_idh, target_idh, :donot_check_if_exists => true)
  end
  new_ng_idh
end
-
-
1
# Lists all node group instances (id, display name, description).
def self.list(model_handle)
  get_objs(model_handle,
           :cols => [:id, :display_name, :description],
           :filter => [:eq, :type, "node_group_instance"])
end
-
-
# TODO: change to having node group having explicit links or using a saved search
-
1
# Member nodes of this group; when the group spans a whole target, the
# members are delegated to the target object.
# TODO: change to having node group having explicit links or using a saved search
def get_node_group_members()
  rows = get_objs(:cols => [:node_members])
  target_idh = NodeGroupRelation.spans_target?(rows.map { |r| r[:node_group_relation] })
  if target_idh
    target_idh.create_object().get_node_group_members()
  else
    rows.map { |r| r[:node_member] }
  end
end
-
-
# returns node group to node mapping for each node matching node filter
# form is {node_id => {ng_id1 => ng1,..}}
# possible that node_id does not appear meaning that this node does not belong to any group
# TODO: this can potentially be expensive to compute without enhancements
def self.get_node_groups_containing_nodes(mh,node_filter)
  ng_mh = mh.createMH(:node)
  # TODO: more efficient to push node_filter into sql query
  sp_hash = {
    :cols => [:id,:group_id, :display_name,:node_members]
  }
  node_to_ng = Hash.new
  # caches, per target id, the filtered member node ids so a target's
  # membership is only computed once even if several groups span it
  target_nodes = Hash.new
  get_objs(ng_mh,sp_hash).each do |r|
    node_group = r.hash_subset(:id,:group_id,:display_name)
    if target_idh = r[:node_group_relation].spans_target?
      # group spans a whole target: every filtered node in the target belongs to it
      target_id = target_idh.get_id()
      target_nodes[target_id] ||= node_filter.filter(target_idh.create_object().get_node_group_members()).map{|n|n[:id]}
      target_nodes[target_id].each do |n_id|
        (node_to_ng[n_id] ||= Hash.new)[node_group[:id]] ||= node_group
      end
    elsif node_filter.include?(r[:node_member])
      # explicit membership row for a single node
      (node_to_ng[r[:node_member][:id]] ||= Hash.new)[node_group[:id]] ||= node_group
    end
  end
  node_to_ng
end
-
-
-
1
# Validates that id names an existing node-group instance that is
# attached to a datacenter; lookup and error raising are delegated to
# check_valid_id_helper.
def self.check_valid_id(model_handle,id)
  check_valid_id_helper(model_handle, id,
    [:and,
     [:eq, :id, id],
     [:eq, :type, "node_group_instance"],
     [:neq, :datacenter_datacenter_id, nil]])
end
-
-
1
# Maps a node-group instance display name to its id; delegates lookup
# and error handling to name_to_id_helper.
def self.name_to_id(model_handle,name)
  filter = [:and,
            [:eq, :display_name, name],
            [:eq, :type, "node_group_instance"],
            [:neq, :datacenter_datacenter_id, nil]]
  name_to_id_helper(model_handle, name, :cols => [:id], :filter => filter)
end
-
-
1
# Maps a node-group instance id to its display name.
# Returns nil when no matching node-group instance exists.
def self.id_to_name(model_handle, id)
  sp_hash = {
    :cols => [:display_name],
    :filter => [:and,
                [:eq, :id, id],
                [:eq, :type, "node_group_instance"],
                [:neq, :datacenter_datacenter_id, nil]]
  }
  row = get_objs(model_handle,sp_hash).first
  # bug fix: original called rows_raw.first[:display_name] unguarded and
  # raised NoMethodError on nil when the id matched no node group
  row && row[:display_name]
end
-
-
1
# Returns the canonical template node for this node group, or nil when
# none is associated.
def get_canonical_template_node()
  rows = get_objs(:cols => [:canonical_template_node])
  rows.map { |row| row[:template_node] }.first
end
-
-
1
# Clones the given template node into this node group's target, makes
# the clone a member of the group, and returns the clone's id handle.
def clone_and_add_template_node(template_node)
  # clone node into node group's target
  target_idh = id_handle.get_top_container_id_handle(:target,:auth_info_from_self => true)
  target = target_idh.create_object()
  cloned_node_id = target.add_item(template_node.id_handle)
  target.update_ui_for_new_item(cloned_node_id)

  # add node group relationship; redundancy check skipped since the node was just created
  cloned_node = model_handle(:node).createIDH(:id => cloned_node_id).create_object()
  add_member(cloned_node,target_idh,:dont_check_redundancy => true)
  cloned_node.id_handle
end
-
-
1
# Makes instance_node a member of this node group: records a
# node_group_relation row and clones the group's components/links onto
# the node. Raises Error if the node is already a member (unless
# opts[:dont_check_redundancy] is set).
def add_member(instance_node,target_idh,opts={})
  node_id = instance_node[:id]
  ng_id = self[:id]
  # check for redundancy
  unless opts[:dont_check_redundancy]
    sp_hash = {
      :cols => [:id],
      :filter => [:and, [:eq, :node_id, node_id], [:eq, :node_group_id, ng_id]]
    }
    redundant_links = Model.get_objs(model_handle(:node_group_relation),sp_hash)
    raise Error.new("Node already member of node group") unless redundant_links.empty?
  end
  # create the node_group_relation item to indicate node group membership
  create_row = {
    :ref => "n#{node_id.to_s}-ng#{ng_id.to_s}",
    :node_id => node_id,
    :node_group_id => ng_id,
    :datacenter_datacenter_id => target_idh.get_id
  }
  Model.create_from_rows(model_handle(:node_group_relation),[create_row])

  # clone the components and links associated with node group to the node
  clone_into_node(instance_node)
end
-
-
1
# Removes this node group instance from the model.
def delete()
  Model.delete_instance(id_handle())
end
-
1
# A node group has no external resources to tear down, so destroying is
# just deleting the model row; opts is accepted for interface
# compatibility with other models' destroy_and_delete.
def destroy_and_delete(opts={})
  delete()
end
-
end
-
end
-
-
2
module DTK; class NodeGroup
-
2
module Clone; module Mixin
-
1
# Post-copy hook run after a component is cloned onto the node group:
# invokes the superclass hook (suppressing pending changes and internal
# links), then propagates the clone to every member node.
# Skipped entirely when opts[:no_post_copy_hook] is set.
def clone_post_copy_hook(clone_copy_output,opts={})
  return if opts[:no_post_copy_hook]
  super_opts = opts.merge(:donot_create_pending_changes => true, :donot_create_internal_links => true)
  super(clone_copy_output,super_opts)
  # super may fill in :outermost_ports; surface it back to the caller's opts
  opts[:outermost_ports] = super_opts[:outermost_ports] if super_opts[:outermost_ports]

  clone_source_obj = clone_copy_output.source_object
  component = clone_copy_output.objects.first
  # tag member-node clones with the node-group component they derive from
  override_attrs = {:ng_component_id => component[:id]}
  # forward only the :ret_new_obj_with_cols option (if present) to the node clones
  node_clone_opts = [:ret_new_obj_with_cols].inject({}) do |h,k|
    opts.has_key?(k) ? h.merge(k => opts[k]) : h
  end
  get_node_group_members().each{|node|node.clone_into(clone_source_obj,override_attrs,node_clone_opts)}
end
-
-
# clone components and links on this node group to node
# Copies each clonable component of the node group onto the given node,
# then wires up attribute links for the external ports produced by the
# copy. No-op when the group has no clonable components.
def clone_into_node(node)
  # components created through a link-def create event are excluded here;
  # those are handled by clone_external_attribute_links
  components = get_objs(:cols => [:cmps_for_clone_into_node]).map { |r| r[:component] }
  return if components.empty?
  external_ports = clone_components(components,node)
  clone_external_attribute_links(external_ports,node)
end
-
1
private
-
1
# Clones each node-group component onto node, in dependency order, and
# returns the accumulated list of outermost (external) ports created by
# the clones.
def clone_components(node_group_cmps,node)
  external_ports = Array.new
  # order components to respect dependencies
  ComponentOrder.derived_order(node_group_cmps) do |ng_cmp|
    # fresh opts per component: clone_into fills :outermost_ports in place
    clone_opts = {
      :ret_new_obj_with_cols => [:id,:display_name],
      :outermost_ports => Array.new,
      :use_source_impl_and_template => true,
      :no_constraint_checking => true
    }
    override_attrs = {:ng_component_id => ng_cmp[:id]}
    node.clone_into(ng_cmp,override_attrs,clone_opts)
    external_ports += clone_opts[:outermost_ports]
  end
  external_ports
end
-
-
1
# For every node-group port link touching the given external ports,
# creates the corresponding attribute links on the target node.
def clone_external_attribute_links(node_external_ports,node)
  infos = ret_port_link_info(node_external_ports)
  return if infos.empty?
  # TODO: can also look at approach where if one node member exists already can do simpler copy
  node_idh = node.id_handle
  infos.each { |info| info[:node_group_port_link].create_attribute_links(node_idh) }
end
-
-
1
# For each node-group port link involving one of node_external_ports'
# corresponding node-group ports, returns a hash with
# :node_group_port_link (the ng link row) and :node_port_link_hash (the
# same link with the ng-side port id replaced by the node's port id).
# NOTE(review): the unconditional raise below makes everything after it
# unreachable — it is an intentional guard until the :link_def_info
# semantics question is resolved.
def ret_port_link_info(node_external_ports)
  ret = Array.new
  return ret if node_external_ports.empty?
  # TODO this makes assumption that can find corresponding port on node group by matching on port display_name
  # get the node group ports that correspond to node_external_ports
  # TODO: this can be more efficient if made into a join
  ng_id = id()
  raise Error.new("Need to check: semantics of :link_def_info has changed to use outer joins")
  sp_hash = {
    :cols => [:id,:link_def_info,:display_name],
    :filter => [:and, [:eq, :node_node_id, ng_id], [:oneof, :display_name, node_external_ports.map{|r|r[:display_name]}]]
  }
  ng_ports = Model.get_objs(model_handle(:port),sp_hash)
  ng_port_ids = ng_ports.map{|r|r[:id]}

  # get the ng_port links
  sp_hash = {
    :cols => [:id, :group_id,:input_id,:output_id,:temporal_order],
    :filter => [:or, [:oneof, :input_id, ng_port_ids], [:oneof, :output_id, ng_port_ids]]
  }
  ng_port_links = Model.get_objs(model_handle(:port_link),sp_hash)

  # form the node_port_link_hashes by substituting corresponding node ports for ng ports
  ndx_node_port_ids = node_external_ports.inject({}){|h,r|h.merge(r[:display_name] => r[:id])}
  ndx_ng_ports = ng_ports.inject({}){|h,r|h.merge(r[:id] => r)}
  ng_port_links.map do |ng_pl|
    # determine which side of the link (input or output) is the ng port
    if ng_port_ids.include?(ng_pl[:input_id])
      index = :input_id
      ng_port_id = ng_pl[:input_id]
    else
      index = :output_id
      ng_port_id = ng_pl[:output_id]
    end
    # map ng port -> node port via matching display_name
    port_display_name = ndx_ng_ports[ng_port_id][:display_name]
    node_port_id = ndx_node_port_ids[port_display_name]
    other_index = (index == :input_id ? :output_id : :input_id)
    {:node_group_port_link => ng_pl, :node_port_link_hash => {index => node_port_id, other_index => ng_pl[other_index]}}
  end
end
-
end; end
-
end; end
-
-
=begin
-
TODO: ***; may want to put in version of this for variables that are not input ports; so a change to a var at the node group level propagates to the node members; for matching would not leverage the component ng_component_id
-
-
TODO: currently not used because instead treating node group more like proxy for node members; keeping in
-
for now in case turns out taking this approach will be more efficient
-
node_components = get_node_group_members().map{|node|node.clone_into(clone_source_obj,override_attrs,node_clone_opts)}
-
-
unless node_components.empty?
-
ng_component = clone_copy_output.objects.first
-
add_links_between_ng_and_node_components(ng_component,node_components)
-
end
-
-
end
-
private
-
-
# this uses a technique that links between ng and component attributes and indirect propagation; problematic when the node group side has an output attribute
-
alternative is adding links at time that node to ng link is added and special processing when attribute changed at ng level
-
def add_links_between_ng_and_node_components(ng_cmp,node_cmps)
-
# get all the relevant attributes
-
ng_cmp_id = ng_cmp[:id]
-
ng_plus_node_cmp_ids = node_cmps.map{|r|r[:id]} + [ng_cmp_id]
-
attr_mh = ng_cmp.model_handle(:attribute)
-
-
cols = AttributeLink.attribute_info_cols()
-
cols << AttrFieldToMatchOn unless cols.include?(AttrFieldToMatchOn)
-
cols << :component_component_id unless cols.include?(:component_component_id)
-
sp_hash = {
-
:cols => cols,
-
:filter => [:oneof, :component_component_id, ng_plus_node_cmp_ids]
-
}
-
attrs = Model.get_objs(attr_mh,sp_hash)
-
return if attrs.empty?
-
-
# partition into attributes on node group and ones on nodes
-
# index by AttrFieldToMatchOn
-
ng_ndx = attrs.select{|r|r[:component_component_id] == ng_cmp_id}.inject({}) do |h,r|
-
h.merge(r[AttrFieldToMatchOn] => r[:id])
-
end
-
# build up link rows to create
-
attr_link_rows = attrs.select{|r|r[:component_component_id] != ng_cmp_id}.map do |r|
-
index = r[AttrFieldToMatchOn]
-
{
-
:output_id => ng_ndx[index],
-
:input_id => r[:id],
-
:function => "eq"
-
}
-
end
-
opts = {:donot_create_pending_changes => true, :attr_rows => attrs}
-
parent_idh = id_handle().get_top_container_id_handle(:target,:auth_info_from_self => true)
-
AttributeLink.create_attribute_links(parent_idh,attr_link_rows,opts)
-
end
-
-
AttrFieldToMatchOn = :display_name
-
=end
-
1
module DTK
-
1
class NodeGroupRelation < Model
-
1
# Looks up the node_group_relation row for the given member node and
# returns its associated assembly; nil when there is no relation or no
# assembly joined in.
def self.get_node_member_assembly?(node_member_idh)
  relation_mh = node_member_idh.createMH(:node_group_relation)
  relation = get_obj(relation_mh,
                     :cols => [:id,:node_member_assembly],
                     :filter => [:eq,:node_id,node_member_idh.get_id()])
  relation && relation[:assembly]
end
-
-
1
# Returns an id handle to the spanned target when this relation row has
# a datacenter but no specific node (meaning the group spans the whole
# target); otherwise nil.
def spans_target?()
  update_object!(:datacenter_datacenter_id,:node_id)
  return nil unless self[:node_id].nil? and self[:datacenter_datacenter_id]
  id_handle(:model_name => :target,:id => self[:datacenter_datacenter_id])
end
-
-
1
# A node group spans a target only when it has exactly one relation row;
# delegates to that row's spans_target?, else returns nil.
def self.spans_target?(ngr_list)
  ngr_list.first.spans_target?() if ngr_list.size == 1
end
-
-
1
# Creates a node_group_relation row marking node_group_idh as spanning
# the whole target (target_idh), unless an equivalent row already
# exists. Returns nil when a spanning relation already exists; raises
# ErrorUsage when the node group is instead attached to specific nodes.
def self.create_to_span_target?(node_group_idh,target_idh,opts={})
  target_id = target_idh.get_id()
  node_group_id = node_group_idh.get_id

  ngr_mh = node_group_idh.create_peerMH(:node_group_relation)

  # check if not created already
  unless opts[:donot_check_if_exists]
    sp_hash = {
      :cols => [:id,:node_id],
      :filter => [:and, [:eq,:node_group_id,node_group_id],[:eq, :datacenter_datacenter_id,target_id]]
    }
    matches = get_objs(ngr_mh,sp_hash)
    error = nil
    if matches.size > 1
      error = true
    elsif matches.size == 1
      # bug fix: original read 'else matches.size == 1' (the comparison was a
      # discarded expression and the branch ran even for zero matches) and
      # called spans_target?() on the matches Array rather than the single
      # row, raising NoMethodError on every path through this branch
      if matches.first.spans_target?()
        return
      else
        error = true
      end
    end
    if error
      raise ErrorUsage.new("Cannot create a node group into spanning target if attached to specific nodes")
    end
  end
  display_name = "spans-target-#{target_id.to_s}"
  create_row = {
    :ref => display_name,
    :display_name => display_name,
    :datacenter_datacenter_id => target_id,
    :node_group_id => node_group_id
  }
  create_from_row(ngr_mh,create_row)
end
-
end
-
end
-
1
module DTK
  # Legacy shim that resolves a logical image name to an IaaS image id
  # by bridging to the newer Node::Template lookup.
  class NodeImage < Model
    # Resolves logical_image_name to the matching IaaS image id for target.
    def self.find_iaas_match(target,logical_image_name)
      legacy_bridge_to_node_template(target,logical_image_name)
    end
    private # NOTE(review): 'private' has no effect on 'def self.' methods; kept for parity with original
    # Runs the node-template lookup and discards the os-type half of the pair.
    def self.legacy_bridge_to_node_template(target,logical_image_name)
      image_id, _os_type = Node::Template.find_image_id_and_os_type(logical_image_name,target)
      image_id
    end
  end
end
-
1
module DTK
  class NodeImageAttribute < Model
    # Size attribute: maps a logical size name to the IaaS-specific size
    # value using the node binding rules.
    class Size < self
      def self.find_iaas_match(target,logical_size)
        legacy_bridge_to_node_template(target,logical_size)
      end
      private # NOTE(review): 'private' has no effect on 'def self.' methods; kept for parity with original
      # hack: reuses node binding rules to find size-only info by matching
      # the rule ref suffix "-<logical_size>"
      def self.legacy_bridge_to_node_template(target,logical_size)
        opts_get = {:cols => [:id,:group_id,:ref,:rules,:os_type]}
        rules = Node::Template.get_matching_node_binding_rules(target,opts_get)
        return nil if rules.empty?
        suffix = Regexp.new("-#{logical_size}$")
        matched = rules.find { |nbr| nbr[:ref] =~ suffix }
        matched && matched[:matching_rule][:node_template][:size]
      end
    end
  end
end
-
1
module DTK
-
1
class Port < Model
-
####################
-
1
# Canonical column set materialized for Port objects.
def self.common_columns()
  [
    :id, :group_id, :display_name, :name, :description, :direction,
    :type, :location, :containing_port_id, :node_id, :component_id,
    :link_def_id
  ]
end
-
-
1
# Validates that id names an existing port. With opts[:assembly_idh],
# additionally requires the port's node to belong to that assembly.
# Returns id on success; raises ErrorIdInvalid (unknown port) or
# ErrorUsage (wrong assembly).
def self.check_valid_id(model_handle,id,opts={})
  if opts[:assembly_idh]
    sp_hash = {
      :cols => [:id,:node],
      :filter => [:eq,:id,id]
    }
    rows = get_objs(model_handle,sp_hash)
    unless port = rows.first
      raise ErrorIdInvalid.new(id,pp_object_type())
    end
    unless port[:node][:assembly_id] == opts[:assembly_idh].get_id()
      # bug fix: original used 'Raise' (an undefined method, capital R) which
      # produced a NoMethodError instead of the intended ErrorUsage
      raise ErrorUsage.new("Port with id (#{id.to_s}) does not belong to assembly")
    end
    id
  else
    check_valid_id_default(model_handle,id)
  end
end
-
-
# name should be of form <node>/<component>, like server/rsyslog::server
# Resolves that service-ref name to a port id scoped to the assembly and
# connection type given in opts (:assembly_idh and :connection_type are
# both required). Raises Error on missing options, ErrorUsage on an
# ill-formed name; delegates final lookup to name_to_id_helper.
def self.name_to_id(model_handle,name,opts={})
  unless opts[:assembly_idh] and opts[:connection_type]
    # bug fix: original error message had an unbalanced parenthesis
    raise Error.new("Unexpected options given in Port.name_to_id (#{opts.inspect})")
  end
  assembly_id = opts[:assembly_idh].get_id()
  conn_type = opts[:connection_type]
  node_display_name,poss_port_display_names = Port.parse_to_ret_display_name(name,conn_type,opts)
  unless node_display_name
    raise ErrorUsage.new("Port name (#{name}) is ill-formed")
  end
  # candidate ports are matched by display name, then narrowed to the
  # right assembly/node with a post filter
  augmented_sp_hash = {
    :cols => [:id,:node],
    :filter => [:oneof,:display_name,poss_port_display_names],
    :post_filter => lambda{|r|r[:node][:assembly_id] == assembly_id and r[:node][:display_name] == node_display_name}
  }
  name_to_id_helper(model_handle,name,augmented_sp_hash)
end
-
-
# virtual attribute defs
-
1
def name()
-
self[:display_name]
-
end
-
-
1
def node_id()
-
self[:node_node_id]
-
end
-
-
###########
-
1
RefDelim = '___'
-
-
# this is an augmented port that has keys: node and optionally :link_def and nested_component
# Renders the user-facing name of this port: "<node>/<module>::<component>"
# (module prefix dropped when module == component; title appended when the
# nested component carries one). The node prefix is suppressed for the
# assembly_wide node when opts[:hide_assembly_wide_node] is set.
def display_name_print_form(opts = {})
  info = parse_port_display_name()
  cmp_ref = ((info[:module] == info[:component]) ? info[:component] : "#{info[:module]}::#{info[:component]}")
  # note: assignment captures the whole '&&' expression — title is nil unless
  # a nested component exists AND it has a title
  if title = self[:nested_component] && ComponentTitle.title?(self[:nested_component])
    cmp_ref = ComponentTitle.display_name_with_title(cmp_ref, title)
  end
  node = self[:node]
  hide_assembly_wide_node = opts[:hide_assembly_wide_node] && node[:display_name].eql?('assembly_wide')
  hide_assembly_wide_node ? cmp_ref : "#{node[:display_name]}/#{cmp_ref}"
end
-
-
# this is an augmented port that has keys: node and optionally :link_def and nested_component
# Builds the hash form used when printing this port: id, connection
# type, and the human-readable service ref; :required/:description from
# the link def are merged in when present.
def print_form_hash()
  hash = {
    :id => self[:id],
    :type => link_def_name,
    :service_ref => display_name_print_form()
  }
  link_def = self[:link_def]
  hash.merge!(link_def.hash_subset(:required, :description)) if link_def
  hash
end
-
-
1
def self.ref_from_display_name(display_name)
-
display_name
-
end
-
-
# TODO: assumption that ref and display_name are the same; start to use ref_from_display_name
# NOTE(review): returns the same parsed field as component_type() —
# presumably an intentional alias; confirm before consolidating.
def component_name()
  parse_port_display_name()[:component_type]
end
-
1
def component_type()
-
parse_port_display_name()[:component_type]
-
end
-
1
def link_def_name()
-
parse_port_display_name()[:link_def_ref]
-
end
-
1
def title?()
-
parse_port_display_name()[:title]
-
end
-
-
# TODO: this should be deprecated;
-
1
def ref_num()
-
# self[:display_name].split(RefDelim)[3].to_i
-
raise Error.new("using deprecated method port#ref_num")
-
end
-
-
1
def parse_port_display_name()
-
display_name = get_field?(:display_name)
-
self.class.parse_port_display_name(display_name)
-
end
-
1
def set_port_info!()
-
self[:port_info] ||= parse_port_display_name()
-
end
-
-
# methods related to internal form of display_name/ref
-
# example internal form ([output|input]___)component_[internal|external]___hdp-hadoop__namenode___namenode_conn[___title]
-
1
class << self
-
1
private
-
1
def ret_encoded_port_name(type,component_type,link_def,dir,title=nil)
-
link_def_ref = link_def[:link_type]
-
ret = "#{dir}#{RefDelim}#{type}#{RefDelim}#{component_type}#{RefDelim}#{link_def_ref}"
-
title ? "#{ret}#{RefDelim}#{title}" : ret
-
end
-
end
-
-
1
ParseRegex = {
-
:with_title => Regexp.new("^component_(internal|external|internal_external)#{RefDelim}(.+)#{RefDelim}(.+)#{RefDelim}(.+$)"),
-
:without_title => Regexp.new("^component_(internal|external|internal_external)#{RefDelim}(.+)#{RefDelim}(.+$)")
-
}
-
1
# Parses an internal port display name (see format comment above:
# ([output|input]___)component_[internal|external]___<module>__<component>___<link_def>[___title])
# into a hash with keys :direction (optional), :port_type,
# :component_type, :link_def_ref, optional :title, and the derived
# :module / :component pair. Raises Error on an unrecognized name.
def self.parse_port_display_name(port_display_name)

  ret = Hash.new
  # TODO: deprecate forms without input or output
  # strip and record the optional direction prefix
  if port_display_name =~ Regexp.new("^input#{RefDelim}(.+$)")
    port_display_name = $1
    ret.merge!(:direction => :input)
  elsif port_display_name =~ Regexp.new("^output#{RefDelim}(.+$)")
    port_display_name = $1
    ret.merge!(:direction => :output)
  end

  # try the titled form first since it is strictly more specific
  if port_display_name =~ ParseRegex[:with_title]
    ret.merge!(:port_type => $1,:component_type => $2,:link_def_ref => $3, :title => $4)
  elsif port_display_name =~ ParseRegex[:without_title]
    ret.merge!(:port_type => $1,:component_type => $2,:link_def_ref => $3)
  else
    raise Error.new("unexpected display name (#{port_display_name})")
  end

  # component_type "mod__cmp" splits into module and component; a bare
  # name serves as both
  component_type = ret[:component_type]
  if component_type =~ Regexp.new("(^.+)__(.+$)")
    ret.merge!(:module => $1,:component => $2)
  else
    ret.merge!(:module => component_type,:component => component_type)
  end

  ret
end
-
# end: methods related to internal form of display_name/ref
-
-
# this function maps from service ref to internal display name
# returns [node_display_name, poss_port_display_names]
# input is of form <node>/<component>, like server/rsyslog::server
# if error (ill-formed name), returns nil
def self.parse_to_ret_display_name(service_ref_name,conn_type,opts={})
  if service_ref_name =~ Regexp.new("(^[^/]+)/([^/]+$)")
    node_display_name = $1
    cmp_ref = $2
    cmp_ref_internal_form = cmp_ref.gsub(/::/,"__")
    # bug fix: original referenced an undefined local 'options' and raised
    # NameError whenever opts[:direction] was supplied
    dirs = (opts[:direction] ? [opts[:direction]] : ["input","output"])
    int_or_ext = opts[:internal_or_external]
    int_or_ext = (int_or_ext ? [int_or_ext] : ["internal","external"])
    # enumerate every direction x internal/external candidate name
    poss_p_names = dirs.map do |dir|
      int_or_ext.map do |ie|
        "#{dir}#{RefDelim}component_#{ie}#{RefDelim}#{cmp_ref_internal_form}#{RefDelim}#{conn_type}"
      end
    end.flatten
    [node_display_name,poss_p_names]
  end
end
-
end
-
-
1
# After cloning, repairs each port's foreign keys: finds the matching
# component (by component type, node, and — when present — title) and the
# matching link def, then persists the updated :component_id/:link_def_id
# pairs. Raises Error when a port has no matching component.
def self.set_ports_link_def_and_cmp_ids(port_mh,ports,cmps,link_defs)
  update_rows = ports.map do |port|
    parsed_port_name = parse_port_display_name(port[:display_name])
    cmp_type = parsed_port_name[:component_type]
    link_def_ref = parsed_port_name[:link_def_ref]
    node_node_id = port[:node_node_id]
    port_title = parsed_port_name[:title]
    # TODO: check if need to match on version too or can only be one version type per component
    cmp_match = cmps.find do |cmp|
      if cmp[:component_type] == cmp_type and cmp[:node_node_id] == node_node_id
        if port_title
          cmp_title = ComponentTitle.title?(cmp)
          cmp_title == port_title
        else
          true
        end
      end
    end
    unless cmp_match
      raise Error.new("Cannot find matching component for cloned port with id (#{port[:id].to_s})")
    end
    cmp_id = cmp_match[:id]
    el = {:id => port[:id],:component_id => cmp_id}
    if link_def_match = link_defs.find{|ld|link_def_match?(ld,cmp_id,link_def_ref,parsed_port_name[:direction])}
      el.merge(:link_def_id => link_def_match[:id])
    else
      # TODO: check why after refactor of link_def/deps this before casting nil started causing a postgres problem; looks like this clause always fired so
      # may be before change link_def_id only matched null; to diagnose can change back temporarily to el.merge(:link_def_id => nil)
      el.merge(:link_def_id => SQL::ColRef.null_id)
    end
  end
  update_from_rows(port_mh,update_rows)
end
-
-
1
private
-
1
# True when link def ld belongs to component cmp_id, its display name
# (with any remote_/local_ prefix stripped) equals link_def_ref, and —
# when a direction is given — the prefix implies that direction.
# Returns nil (falsy) when the outer conditions fail, or when dir is
# given but the display name carries neither prefix.
def self.link_def_match?(ld,cmp_id,link_def_ref,dir)
  if ld[:component_component_id] == cmp_id and
      ld[:display_name].gsub(/^remote_/,"").gsub(/^local_/,"") == link_def_ref
    if dir
      if ld[:display_name] =~ /^remote_/
        dir.to_s == direction_from_local_remote("remote")
      elsif ld[:display_name] =~ /^local_/
        dir.to_s == direction_from_local_remote("local")
      end
    else
      true
    end
  end
end
-
-
1
def self.port_ref(type,attr)
-
ref_num = (attr[:component_ref_num]||1).to_s
-
"#{type}#{RefDelim}#{attr[:component_ref]}#{RefDelim}#{attr[:display_name]}#{RefDelim}#{ref_num}"
-
end
-
-
1
def self.strip_type(ref)
-
ref.gsub(Regexp.new("^[^_]+#{RefDelim}"),"")
-
end
-
-
1
def self.add_type(type,stripped_ref)
-
"#{type}#{RefDelim}#{stripped_ref}"
-
end
-
1
public
-
# returns nil if filtered
# In-place processing for display: when type filters are given, drops
# ports whose :type is not listed (and, for "external", ports lacking a
# containing port). Otherwise localizes the display name (when i18n is
# given), mirrors :direction into :port_type, and materializes the
# common columns.
def filter_and_process!(i18n,*types)
  unless types.empty?
    return nil unless types.include?(self[:type])
    if types.include?("external") #TODO: this special case may go away
      return nil if self[:containing_port_id].nil?
    end
  end

  merge!(:display_name => get_i18n_port_name(i18n,self)) if i18n
  merge!(:port_type=> self[:direction]) #TODO: should probably deprecate after get rid of using in front end
  materialize!(self.class.common_columns())
end
-
-
1
# For each port id handle, returns {:id, :attribute} where :attribute
# prefers the directly attached attribute and falls back to the nested one.
def self.get_attribute_info(port_id_handles)
  rows = get_objects_in_set_from_sp_hash(port_id_handles,:columns => [:id,:attribute])
  rows.map do |row|
    {
      :id => row[:id],
      :attribute => row[:attribute_direct] || row[:attribute_nested]
    }
  end
end
-
-
1
# Builds the row hash used to create a port for (link_def, node,
# component): encodes the port display name/ref from the link def's
# type/direction and the component's type and optional title, and fills
# in node/component foreign keys. :link_def_id is set only for input
# ports on the local side.
def self.ret_port_create_hash(link_def,node,component,opts={})
  node_id = node.id()
  port_mh = node.model_handle_with_auth_info.create_childMH(:port)
  component_type = component.get_field?(:component_type)
  # port type encodes which kinds of links the link def supports
  type =
    if link_def[:has_external_link]
      link_def[:has_internal_link] ? "component_internal_external" : "component_external"
    else #will be just link_def[:has_internal_link]
      "component_internal"
    end

  # TODO: clean up direction to make it cleaner how you set it
  dir = opts[:direction]||direction_from_local_remote(link_def[:local_or_remote],opts)
  cmp_ref = opts[:component_ref]
  # TODO: cleanup logic around when cmp_ref is passed vs when it is not
  title =
    if cmp_ref
      ComponentTitle.title?(cmp_ref)
    elsif component
      ComponentTitle.title?(component)
    end

  display_name = ref = ret_encoded_port_name(type,component_type,link_def,dir,title)
  location_asserted = ret_location_asserted(component_type,link_def[:link_type])
  row = {
    :ref => ref,
    :display_name => display_name,
    :direction => dir,
    :node_node_id => node_id,
    :component_type => component_type,
    :component_id => component.id(),
    :link_type => link_def[:link_type],
    :type => type
  }
  row.merge!(:location_asserted => location_asserted) if location_asserted
  # TODO: not sure if we need opts[:remote_side]
  unless dir == "output" or opts[:remote_side] or link_def[:id].nil?
    row.merge!(:link_def_id => link_def[:id])
  end
  row
end
-
-
1
class << self
-
1
private
-
1
# TODO: just heuristic for computing dir; also need to support "<>" (bidirectional)
# Maps a link def's "local"/"remote" designation to a port direction.
# The mapping is inverted when computing the remote side of the link.
# Unknown designations yield nil.
def direction_from_local_remote(local_or_remote,opts={})
  table =
    if opts[:remote_side]
      {"local" => "output", "remote" => "input"}
    else
      {"local" => "input", "remote" => "output"}
    end
  table[local_or_remote]
end
-
-
# TODO: this should be in link defs
-
1
def ret_location_asserted(component_type,link_type)
-
(LocationMapping[component_type.to_sym]||{})[link_type.to_sym]
-
end
-
1
LocationMapping = {
-
:mysql__master => {
-
:master_connection => "east"
-
},
-
:mysql__slave => {
-
:master_connection => "west"
-
}
-
}
-
-
end
-
-
# virtual attribute defs
# related to UX direction
# Computes the UX-facing location of this port: an asserted location
# wins, then a hard-coded per-component stub mapping on the display
# name, then direction-based north/south defaults.
def location()
  asserted = self[:location_asserted]
  return asserted if asserted

  # TODO: stub
  stub_map = [
    [/nagios__server/, "east"],
    [/mysql__master/, "east"],
    [/nagios__client/, "west"],
    [/ganglia server/, "east"],
    [/ganglia monitor/, "west"]
  ]
  name = self[:display_name]
  stub_map.each do |pattern,loc|
    return loc if name =~ pattern
  end

  case self[:direction]
  when "output" then "north"
  when "input" then "south"
  end
end
-
-
end
-
end
-
1
module DTK
-
1
class PortLink < Model
-
1
# Canonical column set materialized for PortLink objects.
def self.common_columns()
  [:id, :group_id, :input_id, :output_id, :assembly_id, :temporal_order]
end
-
-
1
# Validates that id names an existing port link. With exactly the
# :assembly_idh option, additionally requires the link to belong to
# that assembly. Returns id on success; raises ErrorIdInvalid (unknown
# link), ErrorUsage (wrong assembly), or Error (unsupported options).
def self.check_valid_id(model_handle,id,opts={})
  if opts.empty?()
    check_valid_id_default(model_handle,id)
  elsif Aux.has_just_these_keys?(opts,[:assembly_idh])
    sp_hash = {
      :cols => [:id,:group_id,:assembly_id],
      :filter => [:eq,:id,id]
    }
    rows = get_objs(model_handle,sp_hash)
    unless port_link = rows.first
      raise ErrorIdInvalid.new(id,pp_object_type())
    end
    unless port_link[:assembly_id] == opts[:assembly_idh].get_id()
      raise ErrorUsage.new("Port with id (#{id.to_s}) does not belong to assembly")
    end
    id
  else
    raise Error.new("Unexpected options (#{opts.inspect})")
  end
end
-
-
1
def list_attribute_mappings()
-
filter = [:eq,:port_link_id,id()]
-
AttributeLink.get_augmented(model_handle(:attribute_link),filter).map do |al|
-
{
-
:input_attribute => al[:input].print_form(),
-
:output_attribute => al[:output].print_form()
-
}
-
end
-
end
-
-
# create port link and associated attribute links
# can clone if needed attributes on a service node group to its members
# Resolves the link-def context for the link described by
# port_link_hash (raising PortLinkError if none matches), then, inside
# a transaction, creates the port link row (with the context's temporal
# order) and its attribute links. Returns the created port link.
def self.create_port_and_attr_links__clone_if_needed(target_idh,port_link_hash,opts={})
  unless link_def_context = get_link_def_context?(target_idh,port_link_hash)
    raise PortLinkError.new("Illegal link")
  end
  port_link_to_create = port_link_hash.merge(:temporal_order => link_def_context.temporal_order)
  port_link = nil
  Transaction do
    port_link = create_from_links_hash(target_idh,[port_link_to_create],opts).first
    AttributeLink.create_from_link_defs__clone_if_needed(target_idh,link_def_context,opts.merge(:port_link_idh => port_link.id_handle))
  end
  port_link
end
-
-
# create attribute links from this port link
def create_attribute_links(parent_idh,opts={})
  # The reason to have create_attribute_links is to document callers from which we know no cloning will be needed
  create_attribute_links__clone_if_needed(parent_idh,opts)
end
-
# can clone if needed attributes on a service node group to its members
# this sets temporal order if have option :set_port_link_temporal_order
# Resolves the link-def context for this port link (raising
# PortLinkError if none matches) and creates its attribute links.
# Returns self.
def create_attribute_links__clone_if_needed(parent_idh,opts={})
  # NOTE(review): 'update_obj!' — other models in this file use
  # 'update_object!'; confirm both exist or whether this is a typo
  update_obj!(:input_id,:output_id)
  unless link_def_context = get_link_def_context?(parent_idh)
    raise PortLinkError.new("Illegal link")
  end
  if opts[:set_port_link_temporal_order]
    if temporal_order = link_def_context.temporal_order
      update(:temporal_order => temporal_order)
    end
  end
  opts_create = Aux.hash_subset(opts,[:filter]).merge(:port_link_idh => id_handle())
  AttributeLink.create_from_link_defs__clone_if_needed(parent_idh,link_def_context,opts_create)
  self
end
-
-
1
# Builds the canonical ref string for an assembly-template port link:
# <assembly>--<in_node>-<in_port>--<out_node>-<out_port>
def self.port_link_ref(info)
  in_side = "#{info[:in_node_ref]}-#{info[:in_port_ref]}"
  out_side = "#{info[:out_node_ref]}-#{info[:out_port_ref]}"
  "#{info[:assembly_template_ref]}--#{in_side}--#{out_side}"
end
-
-
# TODO: deprecate after removing v1 assembly export adaptor
# Public alias for the service-instance ref builder.
def self.ref_from_ids(input_id,output_id)
  ref_from_ids_for_service_instances(input_id,output_id)
end
-
-
1
private
-
# TODO: possibly change to using refs for service_instances like do for assembly templates
# Deterministic ref for a service-instance port link: "port_link:<in>-<out>".
def self.ref_from_ids_for_service_instances(input_id,output_id)
  ["port_link:", input_id, "-", output_id].join
end
-
-
1
# Bulk-creates port_link rows (one per entry of links_to_create, each
# with :input_id/:output_id) under the given target; optional
# opts[:override_attrs] is merged into every row. Returns PortLink
# objects for the created rows.
def self.create_from_links_hash(target_idh,links_to_create,opts={})
  override_attrs = opts[:override_attrs]||{}
  rows = links_to_create.map do |link|
    ref = ref_from_ids_for_service_instances(link[:input_id],link[:output_id])
    {
      :input_id => link[:input_id],
      :output_id => link[:output_id],
      :datacenter_datacenter_id => target_idh.get_id(),
      :ref => ref
    }.merge(override_attrs)
  end
  create_opts = {:returning_sql_cols => [:id,:input_id,:output_id]}
  port_link_mh = target_idh.create_childMH(:port_link)
  # TODO: push in use of :c into create_from_rows
  create_from_rows(port_link_mh,rows,create_opts).map{|hash|new(hash,port_link_mh[:c])}
end
-
-
1
# Instance-level convenience: resolves the link-def context for this
# port link (self supplies the :input_id/:output_id pair).
def get_link_def_context?(parent_idh)
  self.class.get_link_def_context?(parent_idh,self)
end
-
1
# Resolves the LinkDef::Context for the port link described by
# port_link_hash (:input_id/:output_id): identifies the local and
# remote sides, finds a link-def link matching the remote component
# type (internal when both components are on the same node, external
# otherwise), then looks up the remote component itself.
# Returns nil when no local side, mismatched link types, or no matching
# link-def link is found; raises Error on inconsistent data.
def self.get_link_def_context?(parent_idh,port_link_hash)
  ret = nil
  sp_hash = {
    :cols => [:id,:group_id,:display_name,:component_type,:direction,:link_type,:link_def_info,:node_node_id],
    :filter => [:oneof, :id, [port_link_hash[:input_id],port_link_hash[:output_id]]]
  }
  ports_with_link_def_info = get_objs(parent_idh.createMH(:port),sp_hash)
  # one row per (port, link-def link) pair; partition into local and remote sides
  local_port_cmp_rows = ports_with_link_def_info.select{|r|(r[:link_def]||{})[:local_or_remote] == "local"}
  return ret if local_port_cmp_rows.empty?
  local_port_cmp_info = local_port_cmp_rows.first #all elements will agree on the parts aside from link_def_link

  remote_port_cmp_rows = ports_with_link_def_info.select{|r|r[:id] != local_port_cmp_info[:id]}
  if remote_port_cmp_rows.empty?
    raise Error.new("Unexpected result that a remote port cannot be found")
  end
  remote_port_cmp_info = remote_port_cmp_rows.first

  return ret unless local_port_cmp_info[:link_type] == remote_port_cmp_info[:link_type]
  # find the matching link_def_link
  remote_cmp_type = remote_port_cmp_info[:component_type]

  # look for matching link
  components_coreside = (local_port_cmp_info[:node_node_id] == remote_port_cmp_info[:node_node_id])
  match = local_port_cmp_rows.find do |r|
    possible_link = r[:link_def_link]||{}
    if possible_link[:remote_component_type] == remote_cmp_type
      if components_coreside
        possible_link[:type] == "internal"
      else
        possible_link[:type] == "external"
      end
    end
  end
  return ret unless match

  # get remote component
  sp_hash = {
    :cols => [:id,:group_id,:display_name,:node_node_id,:component_type,:implementation_id,:extended_base],
    :filter => [:and,Component::Instance.filter(remote_port_cmp_info.component_type,remote_port_cmp_info.title?()),
                [:eq,:node_node_id,remote_port_cmp_info[:node_node_id]]
               ]
  }
  local_cmp = local_port_cmp_info[:component]
  rows = Model.get_objs(local_cmp.model_handle(),sp_hash)
  if rows.size == 1
    remote_cmp = rows.first
  elsif rows.empty?
    raise Error.new("Unexpected that no remote component found")
  else
    raise Error.new("Unexpected that getting remote port link component does not return unique element")
  end
  link_def_link = match[:link_def_link].merge!(:local_component_type => local_cmp[:component_type])

  LinkDef::Context.create(link_def_link,[{:component => local_cmp},{:component => remote_cmp}])
end
-
end
-
-
1
class PortLinkError < ErrorUsage
-
end
-
end
-
1
module DTK
-
1
class Project < Model
-
1
# Creates a project row with the given display name (also used as ref)
# and type. Raises Error when a project with that name already exists.
def self.create_new_project(model_handle,name,type)
  existing = get_objs(model_handle,
                      :cols => [:id],
                      :filter => [:eq,:display_name,name])
  raise Error.new("project with name #{name} exists already") unless existing.empty?
  create_from_row(model_handle,
                  :display_name => name,
                  :ref => name,
                  :type => type)
end
-
-
1
# Returns every project, with its basic identifying columns.
def self.get_all(model_handle)
  get_objs(model_handle, :cols => [:id,:display_name,:group_id,:type])
end
-
-
# TODO: this will be deprecated, but also looks like it gets wrong components
# NOTE(review): method name typo ('implementaton') is part of the public
# interface and cannot be renamed here without breaking callers.
# Builds a component-centric tree: one entry per component type (lowest
# id wins), each carrying its implementations (deduped by id, tagged
# with the component's version). With opts[:include_file_assets], each
# implementation also gets its indexed :file_assets.
def get_implementaton_tree(opts={})
  sp_hash = {:cols => [:id,:display_name,:type,:implementation_tree]}
  unravelled_ret = get_objs(sp_hash)
  ret_hash = Hash.new

  i18n = get_i18n_mappings_for_models(:component)
  unravelled_ret.each do |r|
    # TODO: hack until determine right way to treat relationship between component and implementation versions
    index = r[:component][:component_type]
    cmp = ret_hash[index]
    # TODO: dont think ids are used; but for consistency using lowest id instance
    if cmp.nil? or r[:component][:id] < cmp[:id]
      cmp = ret_hash[index] = r[:component].materialize!(Component.common_columns())
      # TODO: see if cleaner way to put in i18n names
      cmp[:name] = i18n_string(i18n,:component, cmp[:name])
    end
    impls = cmp[:implementations] ||= Hash.new
    # TODO: this is hack that needs fixing
    impls[r[:implementation][:id]] ||= r[:implementation].merge(:version => r[:component][:version])
  end
  ret = ret_hash.values.map{|ct|ct.merge(:implementations => ct[:implementations].values)}
  return ret unless opts[:include_file_assets]

  impl_idhs = ret.map{|ct|ct[:implementations].map{|impl|impl.id_handle}}.flatten(1)
  indexed_asset_files = Implementation.get_indexed_asset_files(impl_idhs)
  ret.each{|ct|ct[:implementations].each{|impl|impl.merge!(:file_assets => indexed_asset_files[impl[:id]])}}
  ret
end
-
-
1
def get_module_tree(opts={})
-
ndx_ret = Hash.new
-
sp_hash = {:cols => [:id,:display_name,:type,:module_tree]}
-
unravelled_ret = get_objs(sp_hash)
-
i18n = get_i18n_mappings_for_models(:component)
-
unravelled_ret.each do |r|
-
impl_id = r[:implementation][:id]
-
cmps = (ndx_ret[impl_id] ||= r[:implementation].merge(:components => Array.new))[:components]
-
if r[:component]
-
cmp = r[:component].materialize!(Component.common_columns())
-
# TODO: see if cleaner way to put in i18n names
-
cmp[:name] = i18n_string(i18n,:component, cmp[:name])
-
cmps << cmp
-
end
-
end
-
ret = ndx_ret.values
-
return ret unless opts[:include_file_assets]
-
-
impl_idhs = ret.map{|impl|impl.id_handle}
-
indexed_asset_files = Implementation.get_indexed_asset_files(impl_idhs)
-
ret.each{|impl|impl.merge!(:file_assets => indexed_asset_files[impl[:id]]||[])}
-
ret
-
end
-
-
1
def get_target_tree()
-
# get and index node group members (index is [target_id][node_group_id]
-
ndx_ng_members = Hash.new
-
get_objs(:cols => [:id,:node_group_relations]).each do |r|
-
pntr = ndx_ng_members[r[:target][:id]] ||= Hash.new
-
ng_id = r[:node_group_relation][:node_group_id]
-
(pntr[ng_id] ||= Array.new) << r[:node_group_relation][:node_id]
-
end
-
-
unravelled_ret = get_objs(:cols => [:id,:display_name,:type,:target_tree])
-
ret_hash = Hash.new
-
unravelled_ret.each do |r|
-
target_id = r[:target][:id]
-
unless target = ret_hash[target_id]
-
target = ret_hash[target_id] ||= r[:target].materialize!(Target.common_columns()).merge(:model_name => "target")
-
end
-
nodes = target[:nodes] ||= Hash.new
-
next unless r[:node]
-
unless node = nodes[r[:node][:id]]
-
node = nodes[r[:node][:id]] = r[:node].materialize!(Node.common_columns())
-
if node.is_node_group?
-
node_group_members = (ndx_ng_members[target_id]||{})[node[:id]]|| Array.new
-
node.merge!(:node_group_members => node_group_members)
-
end
-
end
-
components = node[:components] ||= Hash.new
-
components[r[:component][:id]] = r[:component].materialize!(Component.common_columns()) if r[:component]
-
end
-
ret_hash.values.map do |t|
-
nodes = t[:nodes].values.map do |n|
-
n.merge(:components => n[:components].values)
-
end
-
t.merge(:nodes => nodes)
-
end
-
end
-
-
1
def destroy_and_delete_nodes()
-
targets = get_objs(:cols => [:targets]).map{|r|r[:target]}
-
targets.each{|t|t.destroy_and_delete_nodes()}
-
end
-
-
1
def delete_projects_repo_branches()
-
sp_hash = {
-
:cols => [:repo,:branch],
-
:filter => [:eq, :project_project_id, id()]
-
}
-
impl_mh = model_handle(:implementation)
-
impls = Model.get_objs(impl_mh,sp_hash)
-
impls.each{|impl|Repo.delete(:implementation => impl)}
-
end
-
end
-
end
-
-
1
module XYZ
  # Schema definition: a region (availability zone, datacenter or vdc).
  class Region < Model
    set_relation_name(:region,:region)
    class << self
      def up()
        column :ds_attributes, :json
        column :is_deployed, :boolean, :default => false
        column :type, :varchar, :size => 25 # type is availability_zone, datacenter, vdc
        one_to_many :region
        many_to_one :library
      end
    end
  end
  # TBD: do not include association between region gateway and network region of node since this is inferred through their connection to a network partition; this also allows for more advanced models where node or gateway spans two different regions
  # Schema definition: join model linking regions to network partitions.
  class AssocRegionNetwork < Model
    set_relation_name(:region,:assoc_network_partition)
    class << self
      def up()
        foreign_key :network_partition_id, :network_partition, FK_CASCADE_OPT
        foreign_key :region_id, :region, FK_CASCADE_OPT
        many_to_one :library
      end
    end
  end
end
-
1
module DTK
  # Model object for a git repo managed by the repo manager service.
  class Repo < Model
    r8_nested_require('repo','with_branch')
    r8_nested_require('repo','diff')
    r8_nested_require('repo','diffs')
    r8_nested_require('repo','remote')
    r8_nested_require('repo','connection_to_remote')
    extend ConnectionToRemoteClassMixin
    include ConnectionToRemoteMixin

    # Standard column set used when fetching repo objects.
    def self.common_columns()
      [:id,:display_name,:repo_name,:local_dir]
    end

    ### virtual columns
    # Parent directory of this repo's local clone (local_dir minus its last
    # path segment).
    def base_dir()
      self[:local_dir].gsub(/\/[^\/]+$/,"")
    end
    ####

    # Names of every repo known to the model layer.
    def self.get_all_repo_names(model_handle)
      get_objs(model_handle,:cols => [:repo_name]).map{|r|r[:repo_name]}
    end

    # ACL row linking this repo to the given repo user (nil if none).
    # NOTE(review): method name misspells 'access'; kept for caller compatibility.
    def get_acesss_rights(repo_user_idh)
      sp_hash = {
        # fix: cols previously listed the misspelled symbol :repo_usel_id;
        # the filter below selects on :repo_user_id, which is the real column
        :cols => [:id,:group_id,:access_rights,:repo_user_id,:repo_id],
        :filter => [:and, [:eq,:repo_id,id()],[:eq,:repo_user_id,repo_user_idh.get_id()]]
      }
      Model.get_obj(model_handle(:repo_user_acl),sp_hash)
    end

    # The remote marked :is_default for this repo; raises ErrorUsage if none.
    def default_remote!()
      RepoRemote.default_remote!(self.model_handle(:repo_remote), self.id)
    end

    # Deletes the repo on the repo manager service and then its model row.
    def self.delete(repo_idh)
      repo = repo_idh.create_object()
      RepoManager.delete_repo(repo)
      Model.delete_instance(repo_idh)
    end
  end
end
-
1
module DTK
  class Repo
    # Class-level helpers for deriving remote repo identifiers.
    module ConnectionToRemoteClassMixin
      # Legacy composite identifier for a remote ("base--namespace").
      def remote_ref(remote_repo_base,remote_repo_namespace)
        Log.info_pp(["#TODO: ModuleBranch::Location: deprecate: remote_ref",caller[0..4]])
        "#{remote_repo_base}--#{remote_repo_namespace}"
      end
    end

    # Instance-level delegations to RepoManager for syncing a local repo
    # (identified by the :repo_name field) with a remote.
    module ConnectionToRemoteMixin
      # Registers the remote under remote.remote_ref on the local clone.
      def link_to_remote(local,remote)
        RepoManager.link_to_remote_repo(get_field?(:repo_name),local.branch_name,remote.remote_ref(),remote.repo_url())
      end

      # Pushes the local branch to the remote's branch.
      def push_to_remote(local,remote)
        RepoManager.push_to_remote_repo(get_field?(:repo_name),local.branch_name,remote.remote_ref,remote.branch_name)
      end

      # Removes the named remote from the local clone.
      def unlink_remote(remote)
        RepoManager.unlink_remote(get_field?(:repo_name),remote.remote_ref)
      end

      # Diffs between the loaded local branch and the remote branch.
      def ret_local_remote_diff(module_branch,remote)
        remote_url = remote.repo_url()
        remote_ref = remote.remote_ref()
        remote_branch = remote.branch_name()
        RepoManager.get_loaded_and_remote_diffs(remote_ref, get_field?(:repo_name), module_branch, remote_url, remote_branch)
      end

      # Diffs between the local module branch and the remote branch.
      def get_remote_diffs(module_branch,remote)
        remote_url = remote.repo_url()
        remote_ref = remote.remote_ref()
        remote_branch = remote.branch_name()
        RepoManager.get_remote_diffs(remote_ref, get_field?(:repo_name), module_branch, remote_url, remote_branch)
      end

      # Diffs between two local branches (base vs workspace).
      def get_local_branches_diffs(module_branch,base_branch, workspace_branch)
        RepoManager.get_local_branches_diffs(get_field?(:repo_name), module_branch, base_branch, workspace_branch)
      end

      # Forces the branch head to the given commit sha (git reset --hard style).
      def hard_reset_branch_to_sha(module_branch, sha)
        RepoManager.hard_reset_branch_to_sha(get_field?(:repo_name), module_branch, sha)
      end
    end
  end
end
-
-
1
class DTK::Repo
  # Value object wrapping one entry of a git diff. Exactly one of the
  # file_added / file_renamed / file_deleted / file_modified predicates
  # returns a path hash; the others return nil.
  class Diff
    Attributes = [:new_file,:renamed_file,:deleted_file,:a_path,:b_path,:diff]
    # attribute name -> its writer method symbol (e.g. :a_path => :a_path=)
    AttributeAssignFn = Attributes.inject(Hash.new){|h,a|h.merge(a => "#{a}=".to_sym)}

    # hash_input: hash whose keys are a subset of Attributes.
    def initialize(hash_input)
      hash_input.each{|a,v|send(AttributeAssignFn[a],v)}
    end

    # fix: these previously used `@flag && {...}`, which returns `false`
    # (not nil) when the flag is boolean false; Diffs#ret_summary compacts
    # the results and Array#compact removes only nil, so `false` entries
    # leaked into summaries. `if`/`unless` forms return nil instead.
    def file_added()
      {:path => @a_path} if @new_file
    end

    def file_renamed()
      {:old_path => @b_path, :new_path => @a_path} if @renamed_file
    end

    def file_deleted()
      {:path => @a_path} if @deleted_file
    end

    # Modified means none of the add/delete/rename flags are set.
    def file_modified()
      {:path => @a_path} unless @new_file or @deleted_file or @renamed_file
    end

    private
    attr_writer(*Attributes)
  end
end
-
2
module DTK; class Repo
  # An array of Diff objects for one comparison, remembering the two shas
  # that were compared.
  class Diffs < Array
    attr_reader :a_sha,:b_sha
    def initialize(array_diff_hashes,a_sha,b_sha)
      super(array_diff_hashes.map{|hash|Diff.new(hash)})
      @a_sha = a_sha
      @b_sha = b_sha
    end

    # returns a Summary keyed by :files_renamed, :files_added, :files_deleted,
    # :files_modified; a key is present only when that kind of change occurred
    def ret_summary()
      DiffTypesAndMethods.inject(Summary.new) do |h,(diff_type, diff_method)|
        res = map{|diff|diff.send(diff_method)}.compact
        res.empty? ? h : h.merge(diff_type => res)
      end
    end

    # Hash-like summary of a diff set; values are arrays of path hashes.
    class Summary < SimpleHashObject
      # diffs_hash: optional seed hash; keys outside DiffTypes are logged
      # and dropped.
      def initialize(diffs_hash=nil)
        super()
        (diffs_hash||{}).each do |t,v|
          t = t.to_sym
          if DiffTypes.include?(t)
            self[t] = v
          else
            Log.error("unexpected sumamry diff type (#{t})")
          end
        end
      end
      def no_diffs?()
        keys().empty?
      end
      def no_added_or_deleted_files?()
        not (self[:files_renamed] or self[:files_added] or self[:files_deleted])
      end

      # opts can have
      #  :type which can be terms :module_dsl,:module_refs
      #    indicating what type of meta file to look for
      # True when any modified or added file is a module DSL/refs file.
      def meta_file_changed?(opts={})
        contains_a_dsl_filename?(self[:files_modified],opts) or
          contains_a_dsl_filename?(self[:files_added],opts)
      end

      # True when the given path appears among the modified files.
      # NOTE(review): the `path` parameter shadows the private path() method;
      # `path(r)` (with parens) still resolves to the method, so this works,
      # but it is fragile and worth renaming.
      def file_changed?(path)
        self[:files_modified] and !!self[:files_modified].find{|r|path(r) == path}
      end

      # note: in paths_to_add and paths_to_delete rename appears in both since a rename can be accomplished by an add + a delete
      def paths_to_add()
        (self[:files_added]||[]).map{|r|path(r)} + (self[:files_renamed]||[]).map{|r|r[:new_path]}
      end
      def paths_to_delete()
        (self[:files_deleted]||[]).map{|r|path(r)} + (self[:files_renamed]||[]).map{|r|r[:old_path]}
      end
      DiffNames = [:renamed,:added,:deleted,:modified]
      DiffTypes = DiffNames.map{|n|"files_#{n}".to_sym}

      private
      # Path of one entry; tolerates string or symbol keys.
      def path(r)
        r["path"]||r[:path]
      end

      # True when any entry in files_info is a DSL filename of one of the
      # requested types.
      def contains_a_dsl_filename?(files_info,opts={})
        return unless files_info
        types = (opts[:type] ? [opts[:type]] : [:module_dsl,:module_refs])
        !!files_info.find do |r|
          (types.include?(:module_dsl) and ModuleDSL.isa_dsl_filename?(path(r))) or
            (types.include?(:module_refs) and ModuleRefs.isa_dsl_filename?(path(r)))
        end
      end
    end

    # pairs of [summary key, Diff predicate method], e.g.
    # [:files_added, :file_added]
    DiffTypesAndMethods = Summary::DiffNames.map{|n|["files_#{n}".to_sym,"file_#{n}".to_sym]}

  end
end; end
-
1
r8_require("#{::R8::Config[:sys_root_path]}/repo_manager_client/lib/repo_manager_client")
-
1
module DTK
  class Repo
    # TODO: may have better class name; this is really a remote repo server handler
    # Client-side handler for a remote repo server (the "repoman"): tenant
    # users, module publish/delete, access rights and module listing.
    class Remote
      r8_nested_require('remote','auth')
      include AuthMixin

      # remote_or_repo_base: either a ModuleBranch::Location::Remote object
      # (full remote descriptor) or a bare repo-base name/symbol, or nil.
      def initialize(remote_or_repo_base=nil)
        arg = remote_or_repo_base # for succinctness
        if ModuleBranch::Location::Remote.includes?(arg)
          @remote = arg
          @project = @remote.project
          @remote_repo_base = @remote.remote_repo_base
        elsif arg
          @remote_repo_base = arg.to_sym
        end

        @client = RepoManagerClient.new()
        Log.debug "Using repo manager: '#{@client.rest_base_url}'"
      end

      # Public accessor for the underlying RepoManagerClient.
      def repoman_client
        return client
      end

      # Grants the given client key access and ensures the tenant user exists.
      def add_client_access(client_rsa_pub_key, client_rsa_key_name)
        response = client.add_client_access(client_rsa_pub_key, client_rsa_key_name)
        # we also make sure that tenant user is created
        create_tenant_user()
        response
      end

      def remove_client_access(username)
        client.remove_client_access(username)
      end

      def validate_catalog_credentials(username, password)
        client.validate_catalog_credentials(username, password)
      end

      # Creates (idempotently, per repoman semantics) the dtk-instance tenant
      # user with this instance's RSA key.
      def create_tenant_user()
        username = dtk_instance_remote_repo_username()
        rsa_pub_key = dtk_instance_rsa_pub_key()
        rsa_key_name = dtk_instance_remote_repo_key_name()

        client.create_tenant_user(username, rsa_pub_key, rsa_key_name)
      end

      # Publishes the module described by @remote; returns the repoman
      # response (symbol keys) merged with :remote_repo_namespace.
      def publish_to_remote(client_rsa_pub_key, module_refs_content = nil)
        username = dtk_instance_remote_repo_username()

        # fall back to the session user's namespace when remote has none
        unless namespace = remote.namespace
          namespace = CurrentSession.new.get_user_object().get_namespace()
          Log.error("Unexpected that naemspace was null and used CurrentSession.new.get_user_object().get_namespace(): #{namespace}}")
        end

        params = {
          :username => username,
          :name => remote.module_name(),
          :type => type_for_remote_module(remote.module_type),
          :namespace => namespace
        }

        params.merge!(:module_refs_content => module_refs_content) unless is_empty?(module_refs_content)

        response_data = client.publish_module(params, client_rsa_pub_key)

        {:remote_repo_namespace => namespace}.merge(Aux.convert_keys_to_symbols(response_data))
      end

      # Deletes the remote module after verifying the caller can access it.
      def delete_remote_module(client_rsa_pub_key, force_delete = false)
        raise_error_if_module_is_not_accessible(client_rsa_pub_key)
        params = {
          :username => dtk_instance_remote_repo_username(),
          :name => remote.module_name,
          :namespace => remote.namespace,
          :type => type_for_remote_module(remote.module_type),
          :force_delete => force_delete
        }
        client.delete_module(params, client_rsa_pub_key)
      end

      def raise_error_if_module_is_not_accessible(client_rsa_pub_key)
        get_remote_module_info?(client_rsa_pub_key,:raise_error => true)
      end
      private :raise_error_if_module_is_not_accessible

      # Fetches module info from the repoman; returns nil on any error unless
      # opts[:raise_error] is set.
      # NOTE(review): the bare `rescue Exception` also swallows SystemExit and
      # signals -- should be `rescue StandardError`; flagged, not changed here.
      def get_remote_module_info?(client_rsa_pub_key,opts={})
        client_params = {
          :name => remote.module_name,
          :type => type_for_remote_module(remote.module_type),
          :namespace => remote.namespace,
          :rsa_pub_key => client_rsa_pub_key
        }

        client_params.merge!(:module_refs_content => opts[:module_refs_content]) unless is_empty?(opts[:module_refs_content])

        ret = nil
        begin
          response_data = client.get_module_info(client_params)
          ret = Aux.convert_keys_to_symbols(response_data)
        rescue Exception => e
          if opts[:raise_error]
            raise e
          else
            return nil
          end
        end

        ret.merge!(:remote_repo_url => RepoManagerClient.repo_url_ssh_access(ret[:git_repo_name]))

        if remote.version
          # TODO: ModuleBranch::Location:
          # NOTE(review): the version-check code below this raise is
          # unreachable; the error message is also garbled.
          raise Error.new("Not versions not implemented")
          versions = branch_names_to_versions_stripped(ret[:branches])
          unless versions and versions.include?(remote.version)
            raise ErrorUsage.new("Remote module (#{remote.pp_module_name()}}) does not have version (#{remote.version||"CURRENT"})")
          end
        end
        ret
      end

      # Component/dependency info for the remote module (non-raising).
      def get_remote_module_components(client_rsa_pub_key=nil)
        params = {
          :name => remote.module_name,
          :version => remote.version,
          :namespace => remote.namespace,
          :type => remote.module_type,
          :do_not_raise => true,
          :dependencies_info => true
        }
        @client.get_components_info(params, client_rsa_pub_key)
      end

      # Guarded accessor: raises unless the constructor was given a full
      # remote descriptor.
      def remote()
        unless @remote
          raise Error.new("Should not be called if @remote is nill")
        end
        @remote
      end
      private :remote

      # Lists remote modules (optionally filtered by type), sorted by name.
      def list_module_info(type=nil, rsa_pub_key = nil)
        # NOTE(review): new_repo is assigned but never used
        new_repo = R8::Config[:repo][:remote][:new_client]
        filter = type && {:type => type_for_remote_module(type)}
        remote_modules = client.list_modules(filter, rsa_pub_key)

        unsorted = remote_modules.map do |r|
          el = {}
          last_updated = r['updated_at'] && Time.parse(r['updated_at']).strftime("%Y/%m/%d %H:%M:%S")
          permission_string = "#{r['permission_hash']['user']}/#{r['permission_hash']['user_group']}/#{r['permission_hash']['other']}"
          el.merge!(:display_name => r['full_name'], :owner => r['owner_name'], :group_owners => r['user_group_names'], :permissions => permission_string, :last_updated => last_updated)
          if versions = branch_names_to_versions(r["branches"])
            el.merge!(:versions => versions)
          end
          el
        end

        unsorted.sort{|a,b|a[:display_name] <=> b[:display_name]}
      end

      # Maps branch names to user-facing versions: master -> "CURRENT",
      # others sorted; nil when there is nothing but master.
      def branch_names_to_versions(branch_names)
        return nil unless branch_names and not branch_names == [HeadBranchName]
        (branch_names.include?(HeadBranchName) ? ["CURRENT"] : []) + branch_names.reject{|b|b == HeadBranchName}.sort
      end

      #
      # method will not return 'v' in version name, when used for comparison
      def branch_names_to_versions_stripped(branch_names)
        versions = branch_names_to_versions(branch_names)
        versions ? versions.collect { |v| v.gsub(/^v/,'') } : nil
      end

      private :branch_names_to_versions

      def version_to_branch_name(version=nil)
        self.class.version_to_branch_name(version)
      end
      # nil/"master" -> "master", otherwise "v<version>".
      def self.version_to_branch_name(version=nil)
        Log.info_pp(["#TODO: ModuleBranch::Location: deprecating: version_to_branch_name",caller[0..4]])
        if version.nil? or version == HeadBranchName
          HeadBranchName
        else
          "v#{version}"
        end
      end
      HeadBranchName = "master"

      def default_remote_repo_base()
        self.class.default_remote_repo_base()
      end
      def self.default_remote_repo_base()
        RepoRemote.repo_base()
      end

      # TODO: deprecate when remove all references to these
      def default_remote_repo()
        self.class.default_remote_repo_base()
      end
      def self.default_remote_repo()
        default_remote_repo_base()
      end

      def self.default_user_namespace()
        # CurrentSession.new.get_user_object().get_namespace()
        # we don't want username as default namespace, we will use tenant unique name instead
        # ::DTK::Common::Aux.running_process_user()
        Namespace.default_namespace_name
      end

      # TODO: this needs to be cleaned up
      def self.default_namespace()
        self.default_user_namespace()
      end

      DefaultsNamespace = "r8" # TODO: have this obtained from config file

      # [Haris] We are not using r8 here since we will use tenant id, e.g. "dtk9" as default
      # DefaultsNamespace = self.default_user_namespace() #TODO: have this obtained from config file

      # Splits "name", "namespace/name" or "namespace/name/version" into its
      # parts, defaulting the namespace; raises ErrorUsage otherwise.
      def self.split_qualified_name(qualified_name, opts={})
        raise ErrorUsage.new("Please provide module name to publish") if qualified_name.nil? || qualified_name.empty?
        namespace = opts[:namespace]||default_namespace()

        split = qualified_name.split("/")
        case split.size
        when 1 then [namespace,qualified_name]
        when 2,3 then split
        else
          # NOTE(review): dead guard -- nil/empty was already rejected above
          qualified_name = "NOT PROVIDED" if qualified_name.nil? || qualified_name.empty?
          raise ErrorUsage.new("Module remote name (#{qualified_name}) ill-formed. Must be of form 'name', 'namespace/name' or 'name/namespace/version'")
        end
      end

      private
      attr_reader :client

      # "component_module" -> "component", etc.
      def type_for_remote_module(module_type)
        module_type.to_s.gsub(/_module$/,"")
      end

      # True for nil or empty string.
      def is_empty?(string_value)
        return true if string_value.nil?
        string_value.empty? ? true : false
      end

      # This dtk instance's RSA public key (memoized).
      def dtk_instance_rsa_pub_key()
        @dtk_instance_rsa_pub_key ||= Common::Aux.get_ssh_rsa_pub_key()
      end

      # Repoman username representing this dtk instance.
      def dtk_instance_remote_repo_username()
        "#{dtk_instance_prefix()}-dtk-instance"
      end

      # Tenant name from config, falling back to the process user.
      def dtk_instance_prefix()
        ::R8::Config[:repo][:remote][:tenant_name] || ::DTK::Common::Aux.running_process_user()
      end

      def dtk_instance_remote_repo_key_name()
        "dtk-instance-key"
      end

      # Repoman username of the end user owning the given SSH key.
      def get_end_user_remote_repo_username(mh,ssh_rsa_pub_key)
        RepoUser.match_by_ssh_rsa_pub_key!(mh,ssh_rsa_pub_key).owner.username
      end

    end
  end
end
-
-
2
module DTK; class Repo
  class Remote
    # Authorization helpers mixed into Repo::Remote: granting repoman access
    # rights to the dtk instance user and to end users.
    module AuthMixin

      ACCESS_READ = 'R'
      ACCESS_WRITE = 'W'

      # TODO: ModuleBranch::Location: see why need client_rsa_pub_key
      # Grants this dtk instance's repoman user access (default: read) to the
      # module described by `remote`.
      def authorize_dtk_instance(client_rsa_pub_key = nil, access_rights = nil)
        username = dtk_instance_remote_repo_username()
        rsa_pub_key = dtk_instance_rsa_pub_key()
        rsa_key_name = dtk_instance_remote_repo_key_name()
        access_rights ||= ACCESS_READ

        authorize_user(username, rsa_pub_key, rsa_key_name, access_rights,remote.module_name,remote.namespace,remote.module_type,client_rsa_pub_key)
      end

      # Grants an end user (looked up by SSH key) access to a module.
      # NOTE(review): this passes 6 positional args to authorize_user, which
      # requires 7 (rsa_key_name is missing), so this raises ArgumentError if
      # ever invoked; flagged rather than guess-fixed since the intended key
      # name is not derivable here.
      def authorize_end_user(mh,module_name,module_namespace,type,rsa_pub_key,access_rights)
        username = get_end_user_remote_repo_username(mh,rsa_pub_key)
        authorize_user(username,rsa_pub_key,access_rights.remote_repo_form(),module_name,module_namespace,type)
      end

      private
      # Single choke-point for the repoman grant call.
      def authorize_user(username, rsa_pub_key, rsa_key_name, access_rights, module_name, module_namespace, type, client_rsa_pub_key = nil)
        grant_user_rights_params = {
          :name => module_name,
          :namespace => module_namespace || DefaultsNamespace,
          :type => type_for_remote_module(type),
          :username => username,
          :access_rights => access_rights
        }
        # TODO: [Haris] We do want to keep API same until repo client since we need to support two clients
        client.grant_user_access_to_module(grant_user_rights_params, client_rsa_pub_key)
      end

      # matches namespace from the name remote_repo e.g. "dtk"
      def get_namespace(remote_repo_name)
        if remote_repo_name
          remote_repo_name.scan(/\A.*?(?=--)/).first
        end
      end

    end

    # User-facing error for denied remote-repo access.
    class AccessError < ErrorUsage
      def initialize(remote_repo,access_rights=nil)
        super(error_msg(remote_repo,access_rights))
      end
      private
      def error_msg(remote_repo,access_rights=nil)
        if access_rights
          "#{access_rights.pp_form()} access rights denied to remote repo #{remote_repo}"
        else
          "Access denied to remote repo #{remote_repo}"
        end
      end
    end

    # Access-rights "enum": R (read) and RW (read/write) as class constants,
    # each knowing its repoman wire form and pretty-print form.
    class AccessRights
      class R < self
        def self.remote_repo_form()
          "R"
        end
        def self.pp_form()
          "Read"
        end
      end
      class RW < self
        def self.remote_repo_form()
          "RW+"
        end
        def self.pp_form()
          "Read/Write"
        end
      end
      # "r" -> R, "rw" -> RW; anything else raises ErrorUsage.
      def self.convert_from_string_form(rights)
        case rights
        when "r" then R
        when "rw" then RW
        # fix: was `raise ErrorUsage("...")`, which calls ErrorUsage as a
        # method and raises NoMethodError instead of the intended error
        else raise ErrorUsage.new("Illegal access rights string '#{rights}'")
        end
      end
    end
  end
end; end
-
1
module DTK
  class Repo
    # Repo specialization that also carries a :branch_name, used for
    # per-user workspace repos.
    class WithBranch < self
      # Creates (or reuses) the workspace repo row for `local`, applies the
      # given user ACLs, and creates the repo on the repo manager.
      def self.create_workspace_repo(project_idh,local,repo_user_acls,opts={})
        repo_mh = project_idh.createMH(:repo)
        ret = create_obj?(repo_mh,local)
        repo_idh = repo_mh.createIDH(:id => ret.id)
        RepoUserAcl.modify_model(repo_idh,repo_user_acls)
        RepoManager.create_workspace_repo(ret,repo_user_acls,opts)
        ret
      end

      # First sync of this repo's branch from a remote; returns the commit sha.
      # Raises ErrorUsage if the wanted remote branch is not among
      # remote_repo_info[:branches].
      def initial_sync_with_remote(remote,remote_repo_info)
        unless R8::Config[:repo][:workspace][:use_local_clones]
          raise Error.new("Not implemented yet: initial_sync_with_remote_repo w/o local clones")
        end

        remote_url = remote.repo_url()
        remote_ref = remote.remote_ref()
        remote_branch = remote.branch_name()

        if remote_branches = remote_repo_info[:branches]
          unless remote_branches.include?(remote_branch)
            raise ErrorUsage.new("Cannot find selected version on remote repo #{remote_repo_info[:full_name]||''}")
          end
        end
        commit_sha = RepoManager.initial_sync_with_remote_repo(branch_name(),get_field?(:repo_name),remote_ref,remote_url,remote_branch)
        commit_sha
      end

      # NOTE(review): `private` here does NOT privatize the `def self.` class
      # methods that follow (Ruby gotcha); only instance methods below are
      # affected. Use private_class_method if they should be hidden.
      private
      # Finds or creates the repo row for `local` and stamps it with the
      # branch name.
      def self.create_obj?(model_handle,local)
        repo_name = repo_name(local)
        branch_name = local.branch_name
        sp_hash = {
          :cols => common_columns(),
          :filter => [:eq,:repo_name,repo_name]
        }
        unless repo_obj = get_obj(model_handle,sp_hash)
          repo_hash = {
            :ref => repo_name,
            :display_name => repo_name,
            :repo_name => repo_name,
            :local_dir => "#{R8::Config[:repo][:base_directory]}/#{repo_name}" # TODO: should this be set by RepoManager instead
          }
          repo_idh = create_from_row(model_handle,repo_hash)
          repo_obj = repo_idh.create_object(:model_name => :repo_with_branch).merge(repo_hash)
        end
        set_branch_name!(repo_obj,branch_name)
      end

      def self.set_branch_name!(repo_obj,branch_name)
        repo_obj.merge!(:branch_name => branch_name)
      end
      # Branch name this object was stamped with; raises if unset.
      def branch_name()
        unless ret = self[:branch_name]
          raise Error.new("Unexpected that self[:branch_name] is null for: #{inspect()}")
        end
        ret
      end

      def self.repo_name(local)
        local.private_user_repo_name()
      end

      # Queries via the base :repo model but materializes results as
      # :repo_with_branch subclass objects.
      def self.get_objs(mh,sp_hash,opts={})
        model_handle = (mh[:model_name] == :repo_with_branch ? mh.createMH(:repo) : mh)
        super(model_handle,sp_hash,{:subclass_model_name => :repo_with_branch}.merge(opts))
      end
    end
  end
end
-
1
module DTK
  # Model object for a remote attached to a repo (dtkn catalog or a plain
  # git remote such as github/bitbucket).
  class RepoRemote < Model

    # Display name, optionally prefixed with "dtkn://" and/or the default
    # marker "*" depending on opts.
    def print_form(opts=Opts.new)
      ret = self[:display_name]||'' # '' just to be safe
      ret = "#{DTKNCatalogPrefix}#{ret}" if opts[:dtkn_prefix]
      ret = "#{DefaultMarker}#{ret}" if opts[:is_default_namespace]
      ret
    end

    GIT_REPO_PROVIDERS = ['github','bitbucket','dtkn']
    DTKN_PROVIDER = 'dtkn'

    #
    # remote location regex is used to determine if we are using the special case of an SSH git url e.g.
    #   git@bitbucket.org:hkraji/stdlib.git/stdlib
    #
    # if so then we are pushing to the URL but with a location path provided after the SSH git url (e.g. /stdlib)
    #
    REMOTE_LOCATION_REGEX = /(.*\.git)(\/.*)$/

    DTKNCatalogPrefix = 'dtkn://'
    RemoteRepoBase = :dtknet
    DefaultMarker = '*'

    def self.repo_base()
      RemoteRepoBase
    end

    # Provider name matched from a remote URL; falls back to 'dtkn'.
    def self.git_provider_name(url_of_provider)
      GIT_REPO_PROVIDERS.each do |provider|
        return provider if url_of_provider.match(/(@|\/)#{provider}/)
      end

      GIT_REPO_PROVIDERS.last
    end

    # SSH access URL for this remote's repo on the repo manager.
    def url_ssh_access()
      RepoManagerClient.repo_url_ssh_access(get_field?(:repo_name))
    end

    def git_provider_name()
      RepoRemote.git_provider_name(git_remote_url())
    end

    def is_dtkn_provider?
      GIT_REPO_PROVIDERS.last.eql?(git_provider_name())
    end

    # Explicit :repo_url if set, otherwise the repoman SSH URL.
    def git_remote_url()
      get_field?(:repo_url) || self.url_ssh_access()
    end

    # URL with any trailing location path (after .git) stripped.
    def base_git_remote_url()
      url = git_remote_url()
      url = url.match(REMOTE_LOCATION_REGEX)[1] if is_there_remote_location?
      url
    end

    #
    # Based on git location - it sees if one is provided
    #
    def base_git_remote_location()
      is_there_remote_location? ? git_remote_url().match(REMOTE_LOCATION_REGEX)[2] : nil
    end

    #
    # Checks if git remote location is specified
    #
    def is_there_remote_location?()
      !!git_remote_url().match(REMOTE_LOCATION_REGEX)
    end

    # Identifier used to register this remote on a local clone.
    def remote_ref()
      if is_dtkn_provider?
        Repo.remote_ref(RemoteRepoBase, get_field?(:repo_namespace))
      else
        remote_url = git_remote_url()

        # derive a ref from the url path, e.g. git@host:a/b.git -> a-b
        if remote_url.start_with?('git')
          repo_name =remote_url.split(':').last().split('/').join('-').gsub(/\.git/, '')
        else
          repo_name = remote_url.split('/').last(2).join('-').gsub(/\.git/, '')
        end
        # example: hkraji-stdlib-github
        "#{repo_name}-#{git_provider_name}"
      end
    end

    # Creates a plain git remote row; validates uniqueness and that the URL
    # is an SSH git url.
    def self.create_git_remote(repo_remote_mh, repo_id, repo_name, repo_url, is_default = false)
      # check to see if repo remote exists
      repo_remotes = get_objs(repo_remote_mh, { :filter => [:and, [:eq, :display_name, repo_name], [:eq, :repo_id, repo_id]]})

      unless repo_remotes.empty?
        raise ErrorUsage, "Remote identifier '#{repo_name}' already exists"
      end

      unless repo_url.match(/^git@.*:.*\.git\/?.*?$/)
        raise ErrorUsage, "We are sorry, we only support SSH remotes - provided URL does not seem to be proper SSH url"
      end

      remote_repo_create_hash = {
        :repo_name => repo_name,
        :display_name => repo_name,
        :ref => repo_name,
        :repo_id => repo_id,
        :repo_url => repo_url,
        :is_default => is_default
      }

      create_from_row(repo_remote_mh, remote_repo_create_hash)
    end

    # Deletes the named git remote rows for a repo; raises if none found.
    def self.delete_git_remote(repo_remote_mh, repo_name, repo_id)
      repo_remotes = get_objs(repo_remote_mh, { :filter => [:and, [:eq, :display_name, repo_name], [:eq, :repo_id, repo_id]] })

      if repo_remotes.empty?
        raise ErrorUsage, "Remote '#{repo_name}' not found"
      end

      repo_remotes.each { |rr| rr.delete_instance(rr.id_handle) }
    end

    # Creates a dtkn-catalog remote row; can be marked default explicitly or
    # automatically when it is the first remote for the module.
    def self.create_repo_remote(repo_remote_mh, module_name, repo_name, repo_namespace, repo_id, opts=Opts.new)
      is_default =
        if opts[:set_as_default]
          true
        elsif opts[:set_as_default_if_first]
          get_matching_remote_repos(repo_remote_mh,repo_id, module_name).size == 0
        else
          false
        end

      remote_repo_create_hash = {
        :repo_name => repo_name,
        :display_name => "#{repo_namespace}/#{module_name}",
        :repo_namespace => repo_namespace,
        :repo_id => repo_id,
        :ref => module_name,
        :is_default => is_default
      }

      create_from_row(repo_remote_mh,remote_repo_create_hash)
    end

    # Moves the :is_default flag to the named remote (clearing any previous
    # default first).
    def self.set_default_remote(repo_remote_mh, repo_id, repo_name)
      repo_remote = get_obj(repo_remote_mh, { :filter => [:and, [:eq, :display_name, repo_name], [:eq, :repo_id, repo_id]] })

      raise ErrorUsage, "Not able to find remote '#{repo_name}', aborting action." unless repo_remote

      default_repo_remote = get_obj(repo_remote_mh, { :filter => [:and, [:eq, :is_default, true], [:eq, :repo_id, repo_id]] })

      # set as not active (default)
      update_from_rows(repo_remote_mh, [ { :id => default_repo_remote.id, :is_default => false } ]) if default_repo_remote

      # set as active (default)
      update_from_rows(repo_remote_mh, [ { :id => repo_remote.id, :is_default => true } ])
    end

    def self.delete_repos(idh_list)
      delete_instances(idh_list)
    end

    # Single remote for (repo_id, module_name, namespace); logs and picks the
    # first one if unexpectedly multiple match.
    def self.get_remote_repo(repo_remote_mh,repo_id, module_name, repo_namespace)
      matches = get_matching_remote_repos(repo_remote_mh,repo_id, module_name, repo_namespace)
      if matches.size > 1
        Log.error("Unexpected to have multiple matches in get_remote_repo (#{matches.inspect})")
        # will pick first one
      end
      matches.first
    end

    # All remotes for (repo_id, module_name), optionally also filtered by
    # namespace.
    def self.get_matching_remote_repos(repo_remote_mh,repo_id, module_name, repo_namespace=nil)
      sp_hash = {
        :cols => [:id, :display_name, :repo_name],
        :filter =>
          [:and,
           [:eq, :repo_id, repo_id],
           repo_namespace && [:eq, :repo_namespace, repo_namespace],
           [:eq, :ref, module_name]
          ].compact
      }
      get_objs(repo_remote_mh, sp_hash)
    end

    # Module name portion of a composite repo name ("a--XX--name" -> "name").
    def self.extract_module_name(repo_name)
      repo_name.split(/\-\-.{2}\-\-/).last
    end

    # Find-or-create for dtkn-catalog remotes.
    def self.create_repo_remote?(repo_remote_mh, module_name, repo_name, repo_namespace, repo_id)
      get_remote_repo(repo_remote_mh, repo_id, module_name, repo_namespace) ||
        create_repo_remote(repo_remote_mh, module_name, repo_name, repo_namespace, repo_id)
    end

    # Builds a ModuleBranch::Location remote descriptor for this row.
    def remote_dtkn_location(project,module_type,module_name)
      remote_params = ModuleBranch::Location::RemoteParams::DTKNCatalog.new(
        :module_type => module_type,
        :module_name => module_name,
        :namespace => get_field?(:repo_namespace),
        :remote_repo_base => self.class.repo_base()
      )
      remote_params.create_remote(project).set_repo_name!(get_field?(:repo_name))
    end

    # Default remote for the repo behind a module branch; resolves ambiguity
    # via ret_default_remote_repo.
    def self.default_from_module_branch?(module_branch)
      sp_hash = {
        :cols => [:id,:group_id,:display_name,:repo_name,:repo_namespace,:is_default,:created_at],
        :filter => [:eq,:repo_id,module_branch.get_field?(:repo_id)]
      }
      ret = get_objs(module_branch.model_handle(:repo_remote),sp_hash)
      1 == ret.size ? ret.first : ret_default_remote_repo(ret)
    end

    # Picks the default among candidate remotes, tolerating zero or multiple
    # rows marked :is_default.
    def self.ret_default_remote_repo(repo_remotes)
      # Making robust in case multiple ones marked default
      pruned = repo_remotes.select{|r|r.get_field?(:is_default)}
      if pruned.empty?
        compute_default_remote_repo(repo_remotes)
      elsif pruned.size == 1
        pruned.first
      else
        # NOTE(review): join('') concatenates names with no separator;
        # presumably join(',') was intended (runtime string, left unchanged)
        Log.error("Multiple default remotes found (#{pruned.map{|r|r[:display_name]}.join('')})")
        compute_default_remote_repo(pruned)
      end
    end

    # The remote marked default for repo_id; raises ErrorUsage if none.
    def self.default_remote!(repo_remote_mh, repo_id)
      repo_remote = get_obj(repo_remote_mh, { :filter => [:and, [:eq, :is_default, true], [:eq, :repo_id, repo_id]] })
      raise ErrorUsage, "Not able to find default remote for given repo!" unless repo_remote
      repo_remote
    end

    private

    # TODO: deprecate once all data is migrated so :is_default is marked
    # Picks the oldest remote as default and persists the flag (migration aid).
    def self.compute_default_remote_repo(repo_remotes)
      Log.info("Calling compute_default_remote_repo on (#{repo_remotes.map{|r|r.get_field?(:display_name)}.join(',')})")
      unless (repo_remotes||[]).empty?
        # TODO: enhance so that default is one that matches user's default namespace
        # set on augmented_module_branch[:repo] fields associated with the default namespace
        # we sort ascending by created date
        # default is the one which is the oldest
        repo_remotes.each{|r|r.update_object!(:created_at)}
        default_repo_remote = repo_remotes.sort {|a,b| a[:created_at] <=> b[:created_at]}.first
        default_repo_remote.update(:is_default => true) # migrate it
        default_repo_remote
      end
    end

  end
end
-
1
module DTK
  # A repo user is an identity (username + SSH key pair) registered with the
  # repo manager (gitolite/Repoman), with per-module-type direct-access flags.
  class RepoUser < Model
    # User-facing message when an uploaded public key collides with one
    # already registered for a different user.
    SSH_KEY_EXISTS = "Provided RSA public key already exists for another user"

    ### Attributes ###

    # Columns fetched by default when looking up repo users.
    def self.common_columns()
      [:id,
       :group_id,
       :username,
       :type,
       :index,
       :ssh_rsa_pub_key,
       :component_module_direct_access,
       :service_module_direct_access,
       :repo_manager_direct_access
      ]
    end

    ### Instance methods ###

    # Returns flag which indicates if this user has been created on Repoman
    def has_repoman_direct_access?
      self[:repo_manager_direct_access]
    end

    # Looks up and returns the owning User object.
    def owner
      self.update_object!(:owner_id)
      User.get_user_by_id(self.model_handle(:user), self[:owner_id])
    end

    def rsa_key_name
      self.update_object!(:display_name) unless self[:display_name]
      self[:display_name]
    end

    def rsa_pub_key
      self.update_object!(:ssh_rsa_pub_key) unless self[:ssh_rsa_pub_key]
      self[:ssh_rsa_pub_key]
    end

    def owner_username
      owner.username
    end

    # Returns flag which indicates if this user has access to component_modules or service_modules
    def has_direct_access?(module_model_name,opts={})
      direct_access_col = direct_access_col(module_model_name)
      update_object!(direct_access_col) unless opts[:donot_update]
      self[direct_access_col]
    end

    # Returns flag which indicates if there is direct access to the module type
    # OTHER than the one provided
    def any_direct_access_except?(module_model_name)
      case module_model_name
      when :component_module then has_direct_access?(:service_module)
      when :service_module then has_direct_access?(:component_module)
      else raise Error.new("Illegal module model name (#{module_model_name})")
      end
    end

    # Updates flag for direct access
    # Params:
    #   module_model_name (sym)
    #   val (boolean)
    def update_direct_access(module_model_name,val)
      direct_access_col = direct_access_col(module_model_name)
      update(direct_access_col => val)
      self[direct_access_col] = val
      self
    end

    ### Class methods ###

    # Find user by SSH public key; raises ErrorUsage when no match exists.
    def self.match_by_ssh_rsa_pub_key!(mh, ssh_rsa_pub_key)
      ret = find_by_pub_key(mh, ssh_rsa_pub_key)
      unless ret
        raise ErrorUsage.new("The SSH public key for the client machine has not been registered, have you added SSH key for this client?")
      end
      ret
    end

    def self.find_by_pub_key(model_handle, ssh_rsa_pub_key)
      sp_hash = {
        :cols => common_columns(),
        :filter => [:eq, :ssh_rsa_pub_key, ssh_rsa_pub_key]
      }
      get_obj(model_handle.createMH(:repo_user), sp_hash)
    end

    # ACL entries (username + default rights) for all known repo users.
    def self.authorized_users_acls(model_handle)
      authorized_users(model_handle).map do |repo_username|
        {
          :repo_username => repo_username,
          :access_rights => AuthorizedUserDefaultRights
        }
      end
    end
    AuthorizedUserDefaultRights = 'RW+'

    def self.authorized_users(model_handle)
      get_objs(model_handle.createMH(:repo_user), :cols => [:id,:username]).map{|r|r[:username]}
    end
    private_class_method :authorized_users

    # returns an object or calls block (with new or existing object)
    # NOTE: when matched by ssh public key the return is the pair [match, true];
    # otherwise a single object is returned (callers distinguish the two forms)
    def self.add_repo_user?(repo_user_type, repo_user_mh, ssh_rsa_keys={},username=nil)
      # for match on type; use following logic
      #   if ssh public key given look for match on this
      #   otherwise return error if there are multiple matches for node or admin type
      existing_users = get_existing_repo_users(repo_user_mh,:type => repo_user_type.to_s)
      if ssh_rsa_pub_key = ssh_rsa_keys[:public]
        match = existing_users.find{|r|r[:ssh_rsa_pub_key] == ssh_rsa_pub_key}
        return match, true if match

        # get all public key files from gitolite_admin keydir
        # and raise exception if file with provided rsa_public_key exists already
        gitolite_admin_keydir = RepoManager.get_keydir()
        pub_keys = Dir.entries(gitolite_admin_keydir).select{|key| key.to_s.include?(".pub")}

        pub_keys.each do |key|
          key_content = File.read("#{gitolite_admin_keydir}/#{key}")
          if (key_content == ssh_rsa_pub_key)
            Log.info("Provided RSA public key already exists for another user, other user's keydir (#{key.to_s})")
            raise ErrorUsage.new(SSH_KEY_EXISTS)
          end
        end
      else
        case existing_users.size
        when 0 # no match; fall through and create a new repo user
        when 1
          return existing_users.first
        else
          if [:admin,:node].include?(repo_user_type)
            raise Error.new("Unexpected to have multiple matches of repo user type (#{repo_user_type})")
          end
        end
      end

      add_repo_user(repo_user_type,repo_user_mh,ssh_rsa_keys,existing_users,username)
    end

    # ssh_rsa_keys[:public].nil? means that expected that key already exists in the gitolite admin db
    def self.add_repo_user(repo_user_type,repo_user_mh,ssh_rsa_keys={},existing_users=[],username=nil)
      repo_username,index = ret_new_repo_username_and_index(repo_user_type,existing_users,username)
      if ssh_rsa_keys[:public]
        RepoManager.add_user(repo_username,ssh_rsa_keys[:public],:noop_if_exists => true)
      end
      create_instance(repo_user_mh,repo_user_type,repo_username,index,ssh_rsa_keys)
    end

    def self.get_matching_repo_users(repo_user_mh,filters_keys,username,cols=nil)
      # (removed a useless local assignment; the query result is the return value)
      get_existing_repo_users(repo_user_mh,filters_keys,cols)
    end

    # Like get_matching_repo_users, but expects at most one match; logs an
    # error and returns the first match when multiple are found.
    def self.get_matching_repo_user(repo_user_mh,filters_keys,cols=nil)
      repo_users = get_existing_repo_users(repo_user_mh,filters_keys,cols)
      if repo_users.size > 1
        Log.error("Unexpected to have multiple matches of repo user when matching on (#{filters_keys.inspect})")
      end
      repo_users.first
    end

    def self.get_by_repo_username(model_handle,repo_username)
      sp_hash = {
        :cols => [:id,:username,:repo_manager_direct_access],
        :filter => [:eq,:username,repo_username]
      }
      get_obj(model_handle,sp_hash)
    end

    private

    ### Private instance methods ###

    # Maps a module model name to its direct-access column.
    def direct_access_col(module_model_name)
      case module_model_name
      when :component_module then :component_module_direct_access
      when :service_module then :service_module_direct_access
      else raise Error.new("Illegal module model name (#{module_model_name})")
      end
    end

    ### Private class methods ###

    def self.get_existing_repo_users(repo_user_mh,filter_keys={},cols=nil)
      sp_hash = {
        :cols => cols ? (cols+[:id,:group_id]) : common_columns()
      }
      unless filter_keys.empty?
        filter_list = filter_keys.map{|k,v|[:eq,k,v.to_s]}
        sp_hash[:filter] = (filter_list.size == 1 ? filter_list.first : ([:and] + filter_list))
      end
      get_objs(repo_user_mh,sp_hash)
    end

    def self.ret_new_repo_username_and_index(type,existing_matches,username)
      if type == :admin
        # TODO: r8sserver will be deprecated
        new_repo_username = R8::Config[:admin_repo_user]||"dtk-admin-#{R8::Config[:dtk_instance_user]}"
        new_index = 1
      elsif username
        new_repo_username = username
        new_index = 0
      else
        # next index is one greater than the highest existing index
        # (nil indexes are treated as 0 instead of raising)
        max = existing_matches.map{|m|m[:index]||0}.max || 0
        new_index = max+1
        suffix = (new_index == 1 ? "" : "-#{new_index.to_s}")
        username = CurrentSession.new.get_username()
        new_repo_username = "dtk-#{type}-#{username}#{suffix}"
      end
      [new_repo_username,new_index]
    end

    def self.create_instance(model_handle,type,repo_username,index,ssh_rsa_keys={})
      create_row = {
        :ref => repo_username,
        :display_name => repo_username,
        :username => repo_username,
        :index => index,
        :type => type.to_s,
        :ssh_rsa_pub_key => ssh_rsa_keys[:public],
        :ssh_rsa_private_key => ssh_rsa_keys[:private]
      }
      new_idh = create_from_row(model_handle,create_row)
      new_idh.create_object.merge(create_row)
    end
  end
end
-
1
module XYZ
  # Access-control rows linking a repo user to a repo with a rights string.
  class RepoUserAcl < Model
    # Creates or updates the acl row linking repo_user to repo with
    # new_access_rights; no-op when the rights are unchanged.
    def self.update_model(repo,repo_user,new_access_rights)
      # NOTE(review): 'get_acesss_rights' is spelled this way on Repo — confirm before renaming
      existing = repo.get_acesss_rights(repo_user.id_handle())
      if existing
        return if existing[:access_rights] == new_access_rights
        update_row = {
          :id => existing[:id],
          :access_rights => new_access_rights
        }
        update_from_rows(repo.model_handle(:repo_user_acl),[update_row])
      else
        repo_user.update_object!(:username)
        create_row = ret_create_hash(repo_user[:username],repo[:id],repo_user[:id],new_access_rights)
        create_from_row(repo.model_handle(:repo_user_acl),create_row)
      end
    end

    # TODO: see if can simplify and move into Repo using input_hash_content_into_model with nested hash
    # Replaces the acl children of the repo with the rows built from repo_user_acls.
    def self.modify_model(repo_idh,repo_user_acls)
      repo_id = repo_idh.get_id()
      # TODO: more efficient if RepoUser.get_by_repo_username takes a list
      repo_user_mh = repo_idh.createMH(:repo_user)
      rows = repo_user_acls.map do |acl|
        repo_username = acl[:repo_username]
        unless repo_user_obj = RepoUser.get_by_repo_username(repo_user_mh,repo_username)
          raise Error.new("Unknown repo user (#{repo_username})")
        end
        ret_create_hash(repo_username,repo_id,repo_user_obj[:id],acl[:access_rights])
      end
      model_handle = repo_idh.create_childMH(:repo_user_acl)
      modify_children_from_rows(model_handle,repo_idh,rows)
    end

    private

    # Row hash for a new repo_user_acl record.
    def self.ret_create_hash(repo_username,repo_id,repo_user_id,access_rights)
      {
        :ref => repo_username,
        :display_name => repo_username,
        :repo_id => repo_id,
        :repo_user_id => repo_user_id,
        :access_rights => access_rights
      }
    end
  end
end
-
1
require File.expand_path('search_pattern_parser', File.dirname(__FILE__))
-
-
1
module XYZ
  # A (possibly saved) search: a display name plus a SearchPattern, created
  # either from client JSON input or from a FieldSet.
  class SearchObject < Model
    def json_search_pattern()
      search_pattern ? JSON.generate(search_pattern) : nil
    end

    def create_dataset()
      SQL::DataSetSearchPattern.create_dataset_from_search_object(self)
    end

    attr_accessor :save_flag, :source

    # Builds a SearchObject from client-supplied hash input; raises when the
    # input has neither an id nor a search pattern (see is_valid?).
    def self.create_from_input_hash(input_hash,source,c)
      raise Error.new("search object is ill-formed") unless is_valid?(input_hash)
      sp = nil_if_empty(input_hash["search_pattern"])
      hash = {
        :id => nil_if_empty(input_hash["id"]),
        :display_name => nil_if_empty(input_hash["display_name"]),
        :search_pattern => sp ? SearchPattern.create(sp) : nil
      }
      ret = SearchObject.new(hash,c)
      ret.save_flag = input_hash["save"]
      ret.source = source
      ret
    end

    def self.create_from_field_set(field_set,c,filter=nil)
      sp = {:relation => field_set.model_name, :columns => field_set.cols}
      sp.merge!(:filter => filter) if filter
      hash = {:search_pattern => SearchPattern.create(sp)}
      SearchObject.new(hash,c)
    end

    def json()
      sp = self[:search_pattern]

      # TODO: this is for case when get back search objects from get objects; remove when processed uniformly meaning non-null will always be a search pattern object
      hash_for_json_sp = sp ? (sp.kind_of?(SearchPattern) ? sp.hash_for_json_generate() : sp) : nil

      hash_for_json_generate = {
        "display_name" => self[:display_name],
        "relation" => self[:relation],
        "id" => self[:id],
        "search_pattern" => hash_for_json_sp
      }
      JSON.generate(hash_for_json_generate)
    end

    def should_save?
      return nil unless search_pattern
      return true if save_flag
      not(search_pattern.is_default_view?() or source == :action_set or source == :node_group)
    end

=begin
Remove
    def save_list_view_in_cache?(user)
      return nil unless should_save?
      view_meta_hash = search_pattern ? search_pattern.create_list_view_meta_hash() : nil
      raise Error.new("cannot create list_view meta hash") unless view_meta_hash
      is_saved_search = true

      raise Error::NotImplemented.new("when search_pattern.relation is of type #{search_pattern.relation.class}") unless search_pattern.relation.kind_of?(Symbol)
      view = R8Tpl::ViewR8.new(search_pattern.relation,saved_search_ref(),user,is_saved_search,view_meta_hash)
      # TODO: this necssarily updates if reaches here; more sophistiacted woudl update cache file only if need to
      view.update_cache_for_saved_search()
    end

    def self.save_list_view_in_cache(id,hash_assignments,user)
      search_pattern_json = hash_assignments[:search_pattern]
      return nil unless search_pattern_json
      search_pattern = SearchPattern.create(JSON.parse(search_pattern_json))
      view_meta_hash = search_pattern.create_list_view_meta_hash()
      return nil unless search_pattern.relation
      is_saved_search = true
      view = R8Tpl::ViewR8.new(search_pattern.relation,saved_search_ref(id),user,is_saved_search,view_meta_hash)
      view.update_cache_for_saved_search()
    end
=end

    # Persists (creates or updates) this search. search_pattern may be nil
    # when only the name is being updated on an existing saved search.
    def save(model_handle)
      # BUGFIX: guard the nil case — the update branch below explicitly allows
      # a nil search_pattern (name-only update), but the original called
      # ret_form_for_db() unconditionally and would raise NoMethodError
      search_pattern_db = search_pattern ? search_pattern.ret_form_for_db() : nil
      relation_db = (search_pattern||{})[:relation] ? search_pattern[:relation].to_s : nil
      if @id_handle
        raise Error.new("saved search cannot be updated unless there is a name or search a pattern") unless search_pattern or name
        hash_assignments = Hash.new
        hash_assignments[:display_name] = name if name
        hash_assignments[:search_pattern] = search_pattern_db if search_pattern_db
        hash_assignments[:relation] = relation_db if relation_db
        self.class.update_from_hash_assignments(@id_handle,hash_assignments)
      else
        raise Error.new("saved search cannot be created if search_pattern or relation does not exist") unless search_pattern_db and relation_db
        factory_idh = model_handle.createIDH(:uri => "/search_object", :is_factory => true)
        hash_assignments = {
          :display_name => name || "search_object",
          :search_pattern => search_pattern_db,
          :relation => relation_db
        }
        ref = hash_assignments[:display_name]
        create_hash = {ref => hash_assignments}
        new_id = Model.create_from_hash(factory_idh,create_hash).map{|x|x[:id]}.first
        @id_handle = IDHandle[:c => @c, :id => new_id, :model_name => :search_object]
      end
      id()
    end

    def needs_to_be_retrieved?()
      (id and not search_pattern) ? true : nil
    end

    # Loads the saved search row and merges its non-nil fields into self.
    def retrieve_from_saved_object!()
      raise Error.new("cannot update without an id") unless id()
      saved_object = self.class.get_objects(model_handle,{:id => id()}).first
      raise Error.new("cannot find saved search with id (#{id.to_s})") unless saved_object
      saved_object.each do |k,v|
        next unless v
        self[k] = k == :search_pattern ? SearchPattern.create(v) : v
      end
    end

    def self.is_valid?(input_hash)
      # TODO: can do finer grain validation
      (nil_if_empty(input_hash["id"]) or nil_if_empty(input_hash["search_pattern"])) ? true : nil
    end

    def db()
      self.class.db()
    end

    def search_pattern()
      self[:search_pattern]
    end

    def related_remote_column_info(vcol_sql_fns=nil)
      search_pattern ? search_pattern.related_remote_column_info(vcol_sql_fns) : nil
    end

    def field_set()
      search_pattern ? search_pattern.field_set() : nil
    end

    def order_by()
      search_pattern ? search_pattern.order_by() : nil
    end

    def paging()
      search_pattern ? search_pattern.paging() : nil
    end

    def id()
      @id_handle ? @id_handle.get_id() : nil
    end

    def name()
      self[:display_name]
    end

    def saved_search_template_name()
      "#{saved_search_model_name()}/#{saved_search_ref()}" if saved_search_model_name() and saved_search_ref()
    end

    private

    def saved_search_model_name()
      :saved_search
    end

    def self.saved_search_ref(id)
      id ? "ss-#{id.to_s}" : nil
    end

    def saved_search_ref()
      self.class.saved_search_ref(id)
    end

    # Treats empty strings/collections as absent.
    def self.nil_if_empty(x)
      (x.respond_to?(:empty?) and x.empty?) ? nil : x
    end
  end
end
-
# TODO: replace with relative dir
-
1
r8_require("#{UTILS_DIR}/generate_list_meta_view")
-
1
module XYZ
-
1
# Base class for search patterns; today every pattern is a SearchPatternSimple.
class SearchPattern < HashObject
  def self.create(hash_search_pattern)
    # TODO: case on whether simple or complex
    SearchPatternSimple.new(hash_search_pattern)
  end

  # Builds a pattern that parses only the :filter key of the input.
  def self.create_just_filter(hash_search_pattern)
    SearchPatternSimple.new(hash_search_pattern,:keys=>[:filter])
  end

  # Recursively converts Symbols to their persistent ":sym" string form,
  # descending into arrays and hashes; other values pass through unchanged.
  def self.process_symbols(obj)
    case obj
    when Array
      obj.map{|el|process_symbols(el)}
    when Hash
      obj.inject({}){|h,(k,v)|h.merge(process_symbols(k) => process_symbols(v))}
    when Symbol
      ":#{obj}"
    else
      obj
    end
  end
end
-
-
1
module HashSearchPattern
  # TODO: should unify with parsing in utils/internal/dataset_from_search_pattern.rb; and may do away with having to deal with symbol and variant forms

  # Returns a copy of hash_search_pattern whose filter has hash_filter AND-ed in.
  # Works whether the pattern stores its keys as plain symbols or in the
  # persistent ":key" string form.
  def self.add_to_filter(hash_search_pattern,hash_filter)
    combined = augment_filter(index(hash_search_pattern,:filter),hash_filter)
    merge(hash_search_pattern,{:filter => combined})
  end

  private

  # Conjoins the addition with an existing filter, which may be nil, already
  # a top-level :and expression, or a single comparison.
  def self.augment_filter(hash_filter,hash_filter_addition)
    addition = [hash_filter_addition]
    if hash_filter.nil?
      [:and] + addition
    elsif match(hash_filter.first,:and)
      hash_filter + addition
    else
      [:and, hash_filter] + addition
    end
  end

  # Persistent form of a symbol key: the string ":sym", or the symbol :":sym"
  # when opts[:is_symbol] is set.
  def self.symbol_persistent_form(symbol,opts={})
    opts[:is_symbol] ? ":#{symbol}".to_sym : ":#{symbol}"
  end

  # Non-destructive merge that writes each value under whichever key form
  # (plain or persistent) the hash already uses.
  def self.merge(hash,to_add,opts={})
    to_add.inject(hash){|acc,(k,v)|acc.merge(select_index_form(acc,k,opts) => v)}
  end

  def self.select_index_form(hash,symbol_index,opts={})
    return symbol_index if hash[symbol_index]
    persistent = symbol_persistent_form(symbol_index,opts)
    hash[persistent] ? persistent : symbol_index
  end

  # Looks up symbol_index under either key form.
  def self.index(hash,symbol_index,opts={})
    hash[symbol_index]||hash[symbol_persistent_form(symbol_index,opts)]
  end

  def self.match(term,symbol,opts={})
    term == symbol or term == symbol_persistent_form(symbol,opts)
  end
end
-
-
# TODO: add a more complex search pattern which is joins/link following of simple patterns
-
1
# A flat (single-relation) search pattern parsed from client hash input.
class SearchPatternSimple < SearchPattern
  # Parses the raw hash form into the canonical fields (:relation, :columns,
  # :filter, :order_by, :paging); opts[:keys] restricts which fields are parsed.
  def initialize(hash_search_pattern,opts={})
    super()
    parse_and_set!(hash_search_pattern,opts)
  end

  # Normalizes a comparison expression [op, a, b] into {:col,:constant,:op},
  # treating whichever operand is a Symbol as the column.
  def self.ret_parsed_comparison(expr)
    op = expr[0]
    if expr[1].kind_of?(Symbol)
      {:col => expr[1], :constant => expr[2], :op => op}
    else
      {:col => expr[2], :constant => expr[1], :op => op}
    end
  end

  # Splits the filter into its top-level conjuncts (recursively flattening :and).
  def break_filter_into_conjunctions()
    current_filter = self[:filter]
    return [] if current_filter.nil? or current_filter.empty?
    break_into_conjunctions(current_filter)
  end

  private

  def break_into_conjunctions(expr)
    return [expr] unless expr.first == :and
    expr[1..-1].inject([]){|acc,sub_expr|acc + break_into_conjunctions(sub_expr)}
  end

  public

  def hash_for_json_generate()
    ret = process_symbols(self)
    # TODO: would be nice to get rid of this hack
    ret[":relation"] = ret[":relation"] ? ret[":relation"].gsub(/^:/,"") : nil
    ret
  end

  def related_remote_column_info(vcol_sql_fns=nil)
    field_set().related_remote_column_info(vcol_sql_fns)
  end

  def field_set()
    # TBD: stub; must take out non scalars
    model_name = relation.kind_of?(Symbol) ? relation : nil
    if columns.empty?
      model_name ? Model::FieldSet.default(model_name) : nil
    else
      Model::FieldSet.new(model_name,columns)
    end
  end

  # Default view == no explicit columns, no filter, and a symbolic relation.
  def is_default_view?()
    (columns.empty? and filter.empty? and relation.kind_of?(Symbol)) ? true : nil
  end

  def find_key(type)
    find_key_from_input(type,self)
  end

  def order_by()
    self[:order_by]
  end

  def relation()
    self[:relation]
  end

  def paging()
    self[:paging]
  end

  def create_list_view_meta_hash()
    # TODO: this is very simple; this will be enhanced
    generate_list_meta_view(columns,relation)
  end

  def ret_form_for_db()
    process_symbols(self)
  end

  private

  include GenerateListMetaView

  def process_symbols(obj)
    SearchPattern.process_symbols(obj)
  end

  # Finds the value whose key (plain or ":sym" form) normalizes to type.
  def find_key_from_input(type,hash_input)
    found = hash_input.find{|k,v|ret_symbol(k) == type}
    found ? found[1] : nil
  end

  def columns()
    self[:columns]
  end

  def filter
    self[:filter]
  end

  def parse_and_set!(hash_input,opts={})
    self[:relation] = ret_relation(hash_input) unless donot_ret_key([:relation,:model_name],opts)
    self[:columns] = ret_columns(hash_input) unless donot_ret_key([:columns,:cols],opts)
    self[:filter] = ret_filter(hash_input) unless donot_ret_key(:filter,opts)
    self[:order_by] = ret_order_by(hash_input) unless donot_ret_key(:order_by,opts)
    self[:paging] = ret_paging(hash_input) unless donot_ret_key(:paging,opts)
  end

  # True when opts[:keys] is given and contains none of key_or_keys.
  def donot_ret_key(key_or_keys,opts)
    return nil unless opts[:keys]
    (opts[:keys] & Array(key_or_keys)).empty?
  end

  # TODO: move to using model_name, not relation
  def ret_relation(hash_input)
    relation_str = find_key_from_input(:relation,hash_input)||find_key_from_input(:model_name,hash_input)
    relation_str ? ret_symbol(relation_str) : nil
  end

  def ret_columns(hash_input)
    cols = find_key_from_input(:columns,hash_input)||find_key_from_input(:cols,hash_input)
    return Array.new if cols.nil? or cols.empty?
    raise ErrorParsing.new(:columns,cols) unless cols.kind_of?(Array)
    # form will be an array with each term either token or {:foo => :alias};
    # TODO: right now only treating col as string or term
    cols.map do |col|
      if col.kind_of?(Symbol) or col.kind_of?(String)
        ret_symbol(col)
      elsif col.kind_of?(Hash) and col.size == 1
        {ret_scalar(col.keys.first) => ret_symbol(Aux::ret_value(col))}
      else
        raise ErrorPatternNotImplemented.new(:column,col)
      end
    end
  end

  def ret_filter(hash_input)
    raw_filter = find_key_from_input(:filter,hash_input)
    return Array.new if raw_filter.nil? or raw_filter.empty?

    # TODO: just treating some subset of patterns
    ret = Array.new
    unless raw_filter.kind_of?(Array)
      log_parsing_error_to_skip(:filter,raw_filter)
      return ret
    end
    op,args = get_op_and_args(raw_filter)
    if op.nil?
      log_parsing_error_to_skip(:filter_operation,op)
      return ret
    elsif not [:and,:or].include?(op)
      # assume implicit and
      args = [[op] + args]
      op = :and
    end
    ret << op
    args.each do |el|
      el_op,el_args = get_op_and_args(el)
      # processing nested ands and ors
      if [:and,:or].include?(el_op)
        ret << ret_filter(:filter => el)
      else
        unless el_op and el_args and el_args.size == 2 and FilterOperationsParsed.include?(el_op)
          log_parsing_error_to_skip(:expression,el)
          next
        end
        if el_op == :oneof
          unless el_args[1].kind_of?(Array)
            log_parsing_error_to_skip(:argument_to_one_of,el_args[1])
            next
          end
          ret << [el_op,ret_scalar(el_args[0]),el_args[1]]
        else
          ret << ([el_op] + el_args.map{|x|ret_scalar(x)})
        end
      end
    end
    ret
  end
  FilterOperationsParsed = [:eq, :neq, :lt, :lte, :gt, :gte, "match-prefix".to_sym, :regex, :oneof]

  def ret_order_by(hash_input)
    order_by = find_key_from_input(:order_by,hash_input)
    return Array.new if order_by.nil? or order_by.empty?
    raise ErrorParsing.new(:order_by,order_by) unless order_by.kind_of?(Array)
    order_by.map do |el|
      raise ErrorParsing.new(:order_by_element,el) unless el.kind_of?(Hash) and el.size <= 2
      field = (el.find{|k,v|ret_symbol(k) == :field}||[nil,nil])[1]
      raise ErrorParsing.new(:order_by_element,el) unless field
      order = (el.find{|k,v|ret_symbol(k) == :order}||[nil,"ASC"])[1]
      raise ErrorParsing.new(:order_by_order_direction,order) unless ["ASC","DESC"].include?(order)
      {:field => ret_symbol(field), :order => order}
    end
  end

  def ret_paging(hash_input)
    paging = find_key_from_input(:paging,hash_input)
    return Hash.new if paging.nil? or paging.empty?
    raise ErrorParsing.new(:paging,paging) unless paging.kind_of?(Hash) and paging.size <= 2
    start = (paging.find{|k,v|ret_symbol(k) == :start}||[nil,nil])[1]
    raise ErrorParsing.new(:paging_start,paging) unless start
    limit = (paging.find{|k,v|ret_symbol(k) == :limit}||[nil,nil])[1]
    {:start => start.to_i}.merge(limit ? {:limit => limit.to_i} : {})
  end

  # return op in symbol form and args
  def get_op_and_args(expr)
    return nil unless expr.kind_of?(Array)
    [ret_symbol(expr.first),expr[1..-1]]
  end

  # converts if symbol still in string form; otherwise keeps as string
  def ret_symbol(term_in_json)
    # TODO: short circuit if parsed already
    raise ErrorParsing.new(:symbol,term_in_json) if [Array,Hash].detect{|t|term_in_json.kind_of?(t)}
    # TODO: remove patch
    return :eq if term_in_json == ":"
    # complexity due to handle case where have form :":columns"
    term_in_json.to_s.gsub(/^[:]+/,'').to_sym
  end

  def ret_scalar(term_in_json)
    raise ErrorParsing.new(:symbol,term_in_json) if [Array,Hash].detect{|t|term_in_json.kind_of?(t)}
    # complexity due to handle case where have form :":columns"
    return term_in_json.to_s.gsub(/^[:]+/,'').to_sym if term_in_json.kind_of?(Symbol)
    return $1.to_sym if (term_in_json.kind_of?(String) and term_in_json =~ /^[:]+(.+)/)
    term_in_json
  end

  def ret_symbol_key(obj)
    ret_symbol(Aux::ret_key(obj))
  end

  def log_parsing_error_to_skip(type,object)
    Log.error("skipping ill-formed #{type} which has form: #{object.inspect}")
  end

  class ErrorParsing < Error
    def initialize(type,object)
      super("parsing item #{type} is not supported; it has form: #{object.inspect}")
    end
  end

  class ErrorPatternNotImplemented < Error::NotImplemented
    def initialize(type,object)
      super("parsing item #{type} is not supported; it has form: #{object.inspect}")
    end
  end
end
-
end
-
# TODO: need to reconcile or have better names on this versus NodeGroup
-
1
module DTK
  # Represents objects specified by node groups in assembly templates and
  # service instances.
  # TODO: double check this is an accurate description; they capture what Input::BaseNodes is,
  # but maybe not inventory data subclass
  class ServiceNodeGroup < Node
    r8_nested_require('service_node_group','id_name_helper')
    r8_nested_require('service_node_group','clone')
    r8_nested_require('service_node_group','node_group_member')
    r8_nested_require('service_node_group','cache')

    def self.check_valid_id(model_handle,id)
      IdNameHelper.check_valid_id(model_handle,id)
    end

    def self.name_to_id(model_handle,name)
      IdNameHelper.name_to_id(model_handle,name)
    end

    def self.id_to_name(model_handle, id)
      IdNameHelper.id_to_name(model_handle, id)
    end

    # Returns an array with each element being a cloned component on
    # node_members with their attributes; clones when necessary.
    # if opts[:node_group_components] then filter to only include components
    # corresponding to these node_group_components
    def clone_and_get_components_with_attrs(node_members,opts={})
      Clone.clone_and_get_components_with_attrs(self,node_members,opts)
    end

    # called when bumping up cardinality in a service instance
    def add_group_members(new_cardinality)
      target = get_target()
      assembly = get_assembly?()
      new_tr_idhs = nil
      Transaction do
        ndx_new_tr_idhs = TargetRef::Input::BaseNodes.create_linked_target_refs?(target,assembly,[self],:new_cardinality => new_cardinality)
        unless new_tr_idhs = ndx_new_tr_idhs && ndx_new_tr_idhs[id()]
          raise Error.new("Unexpected that new_tr_idhs is empty")
        end

        # add attribute mappings, cloning if needed
        create_attribute_links__clone_if_needed(target,new_tr_idhs)

        # find or add state change for node group and then add state change objects for new node members
        node_group_sc = StateChange.create_pending_change_item?(:new_item => id_handle(), :parent => target.id_handle())
        node_group_sc_idh = node_group_sc.id_handle()
        new_items_hash = new_tr_idhs.map{|idh|{:new_item => idh, :parent => node_group_sc_idh}}
        StateChange.create_pending_change_items(new_items_hash)
      end
      new_tr_idhs
    end

    # Deletes surplus members when cardinality is lowered, preferring members
    # whose :admin_op_status is set (pending), then the highest :index.
    def delete_group_members(new_cardinality)
      node_members = get_node_group_members()
      num_to_delete = node_members.size - new_cardinality
      # TODO: can be more efficient than needing to sort the whole thing
      sorted = node_members.sort do |a,b|
        a_pending = (a[:admin_op_status] ? 1 : 0)
        b_pending = (b[:admin_op_status] ? 1 : 0)
        if b_pending != a_pending
          b_pending <=> a_pending
        else
          (b[:index]||0) <=> (a[:index]||0)
        end
      end
      to_delete = (0...num_to_delete).map{|i|sorted[i]}
      to_delete.each{|node_group_member|node_group_member.destroy_and_delete()}
    end

    # Lowers the :cardinality attribute by amount; raises when it would go negative.
    def bump_down_cardinality(amount=1)
      card = attribute.cardinality
      new_card = card - amount
      if new_card < 0
        raise ErrorUsage.new("Existing cardinality (#{card.to_s}) is less than amount to decrease it by (#{amount.to_s})")
      end
      Node::NodeAttribute.create_or_set_attributes?([self],:cardinality,new_card)
      new_card
    end

    def get_node_group_members()
      self.class.get_node_group_members(id_handle())
    end

    def self.get_node_group_members(node_group_idh)
      get_ndx_node_group_members([node_group_idh]).values.first||[]
    end

    # Returns hash mapping node-group id => array of member node objects,
    # each member annotated with :target and :index when available.
    def self.get_ndx_node_group_members(node_group_idhs)
      ret = Hash.new
      return ret if node_group_idhs.empty?
      sp_hash = {
        :cols => [:id,:display_name,:node_members],
        :filter => [:oneof,:id,node_group_idhs.map{|ng|ng.get_id()}]
      }
      mh = node_group_idhs.first.createMH()
      get_objs(mh,sp_hash).each do |ng|
        node_member = ng[:node_member]
        target = ng[:target]
        node_member.merge!(:target => target) if target
        if index = TargetRef.node_member_index(node_member)
          node_member.merge!(:index => index)
        end
        ndx = ng[:id]
        (ret[ndx] ||= Array.new) << node_member
      end
      ret
    end

    # making robust so checks if node_or_ngs has node groups already
    def self.expand_with_node_group_members?(node_or_ngs,opts={})
      ng_idhs = node_or_ngs.select{|n|n.is_node_group?}.map{|n|n.id_handle()}
      return node_or_ngs if ng_idhs.empty?
      ndx_node_members = get_ndx_node_group_members(ng_idhs)
      ndx_ret = Hash.new
      node_or_ngs.each do |n|
        unless n.is_node_group?
          ndx_ret.merge!(n.id => n)
          next
        end
        ndx_ret.merge!(n.id => n) unless opts[:remove_node_groups]
        (ndx_node_members[n[:id]]||[]).each do |node|
          if opts[:add_group_member_components]
            components = n.info_about(:components)
            node.merge!(:components => components) unless components.empty?
          end
          ndx_ret.merge!(node.id => node)
        end
      end
      ndx_ret.values
    end

    # Returns the node groups in node_or_ngs as an array (or an empty hash
    # when there are none — preserved quirk of the original return form).
    def self.get_node_groups?(node_or_ngs)
      ndx_ret = Hash.new
      node_or_ngs.each do |n|
        ndx_ret.merge!(n.id => n) if n.is_node_group?
      end
      ndx_ret.empty? ? ndx_ret : ndx_ret.values
    end

    def self.get_node_attributes_to_copy(node_group_idhs)
      Node.get_target_ref_attributes(node_group_idhs,:cols=>NodeAttributesToCopy)
    end
    NodeAttributesToCopy = (Attribute.common_columns + [:ref,:node_node_id]).uniq - [:id]

    def destroy_and_delete(opts={})
      get_node_group_members().map{|node|node.destroy_and_delete(opts)}
      delete_object(:members_are_deleted=>true)
    end

    def delete_object(opts={})
      unless opts[:members_are_deleted]
        get_node_group_members().map{|node|node.delete_object(opts)}
      end
      super(opts)
    end

    private

    # Clones port-link attribute links restricted to the new target refs.
    def create_attribute_links__clone_if_needed(target,target_ref_idhs)
      port_links = get_port_links()
      return if port_links.empty?
      opts_create_links = {:set_port_link_temporal_order=>true, :filter => {:target_ref_idhs => target_ref_idhs}}
      port_links.each do |port_link|
        port_link.create_attribute_links__clone_if_needed(target.id_handle,opts_create_links)
      end
    end
  end
end
-
-
2
module DTK; class ServiceNodeGroup
  # Wraps a node-group row together with its target_refs so node and
  # component attribute lookups are fetched once and memoized.
  class Cache < self
    def self.model_name()
      :node
    end

    def self.create_as(node,target_refs)
      super(node).set_target_refs!(target_refs)
    end

    def set_target_refs!(target_refs)
      @target_refs = target_refs
      self
    end

    # Node-level attributes on the member target refs (memoized).
    def get_node_attributes(opts={})
      @node_attributes ||= get_node_attributes_aux(opts)
    end

    # gets component attributes on node members, first cloning components from node group if needed
    def get_component_attributes(node_group_component,opts={})
      # indexed by node_group_component_id
      @ndx_component_attributes ||= Hash.new
      ndx = node_group_component.id()
      cmps_with_attrs = @ndx_component_attributes[ndx] ||= clone_and_get_components_with_attrs(node_group_component,opts)
      cmps_with_attrs.inject(Array.new){|acc,cmp|acc + cmp[:attributes]}
    end

    private

    def get_node_attributes_aux(opts={})
      target_ref_ids = target_ref_ids?(opts[:filter]) || @target_refs.map{|n|n.id}
      sp_hash = {
        :cols => [:id,:group,:display_name,:node_node_id],
        :filter => [:oneof, :node_node_id,target_ref_ids]
      }
      attr_mh = model_handle(:attribute)
      Model.get_objs(attr_mh,sp_hash)
    end

    def clone_and_get_components_with_attrs(node_group_component,opts={})
      # the superclass method returns an array with each element being a cloned
      # component and within that element an :attributes field that has all clone attributes
      target_refs = @target_refs
      if target_ref_ids = target_ref_ids?(opts[:filter])
        target_refs = target_refs.select{|r|target_ref_ids.include?(r[:id])}
      end
      super(target_refs,:node_group_components => [node_group_component])
    end

    # Extracts target-ref ids from a filter hash, or nil when filter is absent.
    def target_ref_ids?(filter=nil)
      filter && (filter[:target_ref_idhs]||[]).map{|idh|idh.get_id()}
    end
  end
end; end
-
-
2
module DTK; class ServiceNodeGroup
  # Cloning of node-group components onto member nodes.
  # NOTE(review): 'private' below does not affect 'def self.' methods in Ruby;
  # these module methods remain public.
  module Clone
    # clone_and_get_components_with_attrs returns an array with each element being
    # a cloned component on node_members with their attributes; it clones if necessary.
    # if opts[:node_group_components] then filter to only include components
    # corresponding to these node_group_components
    def self.clone_and_get_components_with_attrs(node_group,node_members,opts={})
      needs_cloning, cloned_components = determine_cloned_components(node_group,node_members,opts)
      # clone the missing ones ...
      ret = needs_cloning.map do |pair|
        clone_component(pair.node_group_component,pair.node_group_member)
      end
      # ... and fetch attributes for the ones already cloned
      unless cloned_components.empty?
        ret += get_components_with_attributes(cloned_components)
      end
      ret
    end

    private
    # returns a cloned component with a field :attributes, which has all the component's attributes
    def self.clone_component(node_group_cmp,node_group_member)
      clone_opts = {
        :include_list => [:attribute],
        :ret_new_obj_with_cols => [:id,:group_id,:display_name],
        :ret_clone_copy_output => true,
        :no_violation_checking => true
      }
      override_attrs = {:attribute => {:hidden => true}}
      clone_copy_output = node_group_member.clone_into(node_group_cmp,override_attrs,clone_opts)
      node_member_cmp = clone_copy_output.objects.first
      level = 1
      attributes = clone_copy_output.children_objects(level,:attribute)
      node_member_cmp.merge(:attributes => attributes)
    end

    # Pairs a node-group component with the member node it must be cloned onto.
    ComponentNodePair = Struct.new(:node_group_component,:node_group_member)
    # returns two arrays [needs_cloning, cloned_components]
    # needs_cloning has elements of type ComponentNodePair
    # where component is node group component and node is node member
    # cloned_components is array with cloned components
    # if opts[:node_group_components] then filter to only include components
    # corresponding to these node_group_components
    def self.determine_cloned_components(node_group,node_members,opts)
      needs_cloning, cloned_components = [], []
      ret = [needs_cloning, cloned_components]
      return ret if node_members.empty?()
      node_group_id = node_group.id()
      # one query covers components on the group itself and on all members
      sp_hash = {
        :cols => [:id,:group_id,:display_name,:node_node_id,:ancestor_id],
        :filter => [:oneof, :node_node_id, node_members.map{|n|n.id}+[node_group_id]]
      }
      # ndx_cmps is double indexed by [node_id][cmp_id]
      ndx_cmps = Hash.new
      cmp_mh = node_group.model_handle(:component)
      Model.get_objs(cmp_mh,sp_hash).each do |cmp|
        node_id = cmp[:node_node_id]
        cmp_id = cmp[:id]
        (ndx_cmps[node_id] ||= Hash.new).merge!(cmp_id => cmp)
      end

      ndx_ng_cmps = ndx_cmps[node_group_id]||{}
      ng_cmp_ids = ndx_ng_cmps.keys
      if restricted_cmps = opts[:node_group_components]
        ng_cmp_ids = ng_cmp_ids & restricted_cmps.map{|r|r.id}
      end

      return ret if ng_cmp_ids.empty?

      node_members.each do |node|
        # for each node group component id see if there is a corresponding component on
        # the node (member) by looking at if there is cloned component that has
        # ancestor_id matching ng_cmp_id
        #
        # To enable this compute an ndx that takes ancestor_id to cmp_id;
        # this is possible because cmps_on_node has unique ancestor_ids
        cmps_on_node = (ndx_cmps[node.id]||{}).values
        ndx_ancestor_id_to_cmp = cmps_on_node.inject(Hash.new){|h,r|h.merge(r[:ancestor_id] => r)}
        ng_cmp_ids.each do |ng_cmp_id|
          if cloned_cmp = ndx_ancestor_id_to_cmp[ng_cmp_id]
            cloned_components << cloned_cmp
          else
            ng_cmp = ndx_ng_cmps[ng_cmp_id]
            # node is of type Node and we want to use type NodeGroupMember
            node_group_member = NodeGroupMember.create_as(node)
            needs_cloning << ComponentNodePair.new(ng_cmp,node_group_member)
          end
        end
      end
      ret
    end

    # Attaches an :attributes array (all attribute rows) to each component;
    # fetched in a single query over all component ids.
    def self.get_components_with_attributes(components)
      ret = Array.new
      return ret if components.empty?
      ndx_cmps = components.inject(Hash.new) do |h,cmp|
        h.merge(cmp[:id] => cmp.merge(:attributes => Array.new))
      end
      sp_hash = {
        :cols => [:id,:group_id,:display_name,:component_component_id],
        :filter => [:oneof,:component_component_id,ndx_cmps.keys]
      }
      attr_mh = components.first.model_handle(:attribute)
      Model.get_objs(attr_mh,sp_hash).each do |attr|
        ndx = attr[:component_component_id]
        ndx_cmps[ndx][:attributes] << attr
      end
      ndx_cmps.values
    end

  end
end; end
-
-
1
module DTK
  class ServiceNodeGroup
    # Helpers for translating between service-node-group ids and display names.
    # NOTE(review): 'private' below does not affect 'def self.' methods in Ruby;
    # these module methods remain public.
    module IdNameHelper
      # Validates that id refers to a service node group.
      def self.check_valid_id(model_handle,id)
        check_valid_id_helper(model_handle,id,filter(:id => id))
      end
      # Looks up the id for the group with the given display name.
      def self.name_to_id(model_handle,name)
        sp_hash = {
          :cols => [:id],
          :filter => filter(:display_name => name)
        }
        name_to_id_helper(model_handle,name,sp_hash)
      end
      # Returns the display name for id, or nil when no matching row exists.
      def self.id_to_name(model_handle, id)
        sp_hash = {
          :cols => [:display_name],
          :filter => filter(:id => id)
        }
        rows = get_objs(model_handle,sp_hash)
        # fixed: guard rows.first — previously an empty (but non-nil) result
        # set raised NoMethodError on nil
        row = rows && rows.first
        row && row[:display_name]
      end

      private
      # Conjoins the base type filter with a single equality condition.
      def self.filter(added_condition_hash)
        FilterBase + [[:eq, added_condition_hash.keys.first,added_condition_hash.values.first]]
      end

      NodeType = 'service_node_group'
      # Base filter: correct node type and attached to a datacenter.
      FilterBase =
        [:and,
         [:eq, :type, NodeType],
         [:neq, :datacenter_datacenter_id, nil]
        ]
    end
  end
end
-
-
1
module DTK
  class ServiceNodeGroup
    # A member node of a service node group; behaves as a Node but knows how
    # to reach the node group that owns it.
    class NodeGroupMember < ::DTK::Node
      def self.model_name()
        :node
      end
      # Decrements the cardinality of the node group this member belongs to.
      def bump_down_associated_node_group_cardinality()
        service_node_group().bump_down_cardinality()
      end
      def clone_post_copy_hook(clone_copy_output,opts={})
        # no op
      end
      private
      # Finds the (single) node group this member belongs to via the
      # node_group_relation model and wraps it as a ServiceNodeGroup; memoized.
      # Raises when the member is linked to zero or multiple groups, or when
      # the linked node is not actually a node group.
      def service_node_group()
        return @service_node_group if @service_node_group
        sp_hash = {
          :cols => [:id,:service_node_group],
          :filter => [:eq,:node_id,id()]
        }
        nodes = Model.get_objs(model_handle(:node_group_relation),sp_hash).map{|r|r[:service_node_group]}
        unless nodes.size == 1
          # fixed: message referred to 'rows.size' though the variable is
          # 'nodes'; also dropped redundant .to_s inside interpolation
          raise Error.new("Unexpected that nodes.size (#{nodes.size}) does not equal 1")
        end
        ret = nodes.first
        unless ret.is_node_group?()
          raise Error.new("Unexpected that node (#{ret.inspect}) connected to node group member (#{get_field?(:display_name)}) is not a node group")
        end
        @service_node_group = ServiceNodeGroup.create_as(ret)
      end
    end
  end
end
-
-
-
# TODO: determine whether to handle ng component to ng member using links or by having processing
-
# change to node group component specially, which is implemented now
-
# This would replace no op above
-
# def clone_post_copy_hook(clone_copy_output,opts={})
-
# add attribute links between source components and the ones generated
-
# level = 1
-
# cols_to_get = [:id,:group_id,:display_name,:ancestor_id]
-
# cloned_attributes = clone_copy_output.children_objects(level,:attribute, :cols => cols_to_get)
-
# link_node_group_attributes_to_clone_ones(cloned_attributes)
-
# end
-
# private
-
# def link_node_group_attributes_to_clone_ones(cloned_attributes)
-
# return if cloned_attributes.empty?
-
# attr_mh = cloned_attributes.first.model_handle()
-
# # TODO:
-
# end
-
# end
-
# end
-
#end
-
1
module DTK
  # A pending or applied change to system state (node creation, component
  # install/update, attribute setting, ...), persisted per target.
  class StateChange < Model
    r8_nested_require('state_change','get_pending_changes')
    r8_nested_require('state_change','create')
    r8_nested_require('state_change','assembly')
    r8_nested_require('state_change','node_centric')

    extend GetPendingChangesClassMixin
    extend CreateClassMixin

    # Groups the flat pending-change list for target_idh by node; each result
    # element has :node_id/:node_name plus :node_changes and/or
    # :component_changes arrays when non-empty.
    def self.list_pending_changes(target_idh)
      # TODO: may pass in options so dont get all fields that are returned in flat_list_pending_changes
      pending_changes = flat_list_pending_changes(target_idh)
      ndx_ret = Hash.new
      pending_changes.each do |ch|
        node_id = ch[:node][:id]
        node = ndx_ret[node_id] ||= {:node_id => node_id, :node_name => ch[:node][:display_name], :node_changes => Array.new, :ndx_cmp_changes => Hash.new}
        if ch[:type] == "create_node"
          node[:node_changes] << {:name => ret_display_name(ch)}
        else
          # any non-create_node change is treated as a component-level change
          cmp_id = ch[:component][:id]
          cmp = node[:ndx_cmp_changes][cmp_id] ||= {:component_id => cmp_id, :component_name => ch[:component][:display_name], :changes => Array.new}
          # TODO stub
          cmp[:changes] << ret_display_name(ch)
        end
      end
      ndx_ret.values.map do |n|
        # NOTE(review): 'changes' is computed but never used below — confirm
        # whether it was meant to feed the returned element
        changes = n[:node_changes] + n[:ndx_cmp_changes].values
        el = {:node_id => n[:node_id], :node_name => n[:node_name]}
        el.merge!(:node_changes => n[:node_changes]) unless n[:node_changes].empty?
        el.merge!(:component_changes => n[:ndx_cmp_changes].values) unless n[:ndx_cmp_changes].empty?
        el
      end
    end

    # object processing and access functions
    #######################
    # Config agent type (as a symbol) of this change's component, or nil when
    # no component/config_agent_type is present.
    def on_node_config_agent_type()
      ret = (self[:component]||{})[:config_agent_type]
      ret && ret.to_sym
    end

    # Config agent type used when creating a node.
    def create_node_config_agent_type()
      # TODO: stub
      :ec2
    end

    # True when every state change in the list shares the same
    # :relative_order (and thus may run concurrently); nil otherwise.
    def self.state_changes_are_concurrent?(state_change_list)
      rel_order = state_change_list.map{|x|x[:relative_order]}
      val = rel_order.shift
      rel_order.each{|x|return nil unless x == val}
      true
    end

    # Human-readable label for a flat pending change, e.g.
    # "install_component(node:cmp)"; unrecognized types are logged and the
    # bare type is returned.
    def self.ret_display_name(flat_pending_ch)
      type = flat_pending_ch[:type]
      node_name = flat_pending_ch[:node][:display_name]
      suffix =
        case type
        when "create_node"
          node_name
        when "install_component", "update_implementation"
          cmp_name = flat_pending_ch[:component][:display_name]
          "#{node_name}:#{cmp_name}"
        else
          Log.error("need rules to treat type (#{type})")
          nil
        end
      suffix ? "#{type}(#{suffix})" : type
    end
  end
end
-
2
module DTK; class StateChange
  # Assembly-scoped state changes.
  class Assembly < self
    # Pending component state changes for the assembly, grouped per node and
    # sorted by each node's ordered_component_ids when available.
    # component_type == :smoketest restricts to smoketest components; any
    # other value excludes them.
    def self.component_state_changes(assembly,component_type=nil)
      filter = [:and, [:eq, :assembly_id, assembly[:id]]]
      if (component_type == :smoketest)
        filter << [:eq, :basic_type, "smoketest"]
      else
        filter << [:neq, :basic_type, "smoketest"]
      end
      sp_hash = {
        :cols => DTK::Component::pending_changes_cols,
        :filter => filter
      }
      state_change_mh = assembly.model_handle(:state_change)

      changes = get_objs(assembly.model_handle(:component),sp_hash).map do |cmp|
        node = cmp.delete(:node)
        hash = {
          :type => "converge_component",
          :component => cmp,
          :node => node
        }
        create_stub(state_change_mh,hash)
      end
      ## group by node id
      ndx_ret = Hash.new
      changes.each do |sc|
        node_id = sc[:node][:id]
        (ndx_ret[node_id] ||= Array.new) << sc
      end

      # Sorting components on each node by 'ordered_component_ids' field
      sorted_ndx_ret = Array.new
      begin
        ndx_ret.values.each do |component_list|
          ordered_component_ids = component_list.first[:node].get_ordered_component_ids()
          sorted_component_list = Array.new
          component_list.each do |change|
            sorted_component_list[ordered_component_ids.index(change[:component][:id])] = change
          end
          sorted_ndx_ret << sorted_component_list.compact
        end
      rescue StandardError # fixed: was 'rescue Exception', which also swallows signals/SystemExit
        # Sorting components failed. Returning random component order
        return ndx_ret.values
      end
      sorted_ndx_ret
    end

    ##
    # The method node_state_changes returns state changes related to nodes
    # for the given task action type (:create_node or :power_on_node).
    def self.node_state_changes(task_action_type,assembly,target_idh,opts={})
      case task_action_type
      when :create_node
        node_state_changes__create_nodes(assembly,target_idh,opts)
      when :power_on_node
        node_state_changes__power_on_nodes(assembly,target_idh,opts)
      else
        # fixed: previously interpolated undefined 'task_action_class'
        # (NameError) and misspelled "Unexpcted"
        raise Error.new("Unexpected task_action_type (#{task_action_type})")
      end
    end
    private
    # Walks pending 'create_node' state changes level by level starting at the
    # target; with opts[:just_leaf_nodes], node-group entries are filtered out.
    def self.node_state_changes__create_nodes(assembly,target_idh,opts={})
      ret = Array.new
      assembly_nodes = opts[:nodes]||assembly.get_nodes()
      return ret if assembly_nodes.empty?

      added_state_change_filters = [[:oneof, :node_id, assembly_nodes.map{|r|r[:id]}]]
      target_mh = target_idh.createMH()
      last_level = pending_create_node(target_mh,[target_idh],:added_filters => added_state_change_filters)
      state_change_mh = target_mh.create_childMH(:state_change)
      while not last_level.empty?
        ret += last_level
        last_level = pending_create_node(state_change_mh,last_level.map{|obj|obj.id_handle()})
      end
      # fixed: previously a bare 'ret' was evaluated and discarded before a
      # trailing 'if', so the method returned nil whenever :just_leaf_nodes
      # was not set
      if opts[:just_leaf_nodes]
        ret = ret.reject{|sc|sc[:node].is_node_group?()}
      end
      ret
    end

    # Builds 'power_on_node' state-change stubs for the assembly's leaf nodes
    # that are not already running; requires opts[:just_leaf_nodes].
    def self.node_state_changes__power_on_nodes(assembly,target_idh,opts={})
      ret = Array.new()
      unless opts[:just_leaf_nodes]
        raise Error.new("Only supporting option :just_leaf_nodes")
      end
      nodes = opts[:nodes]||assembly.get_leaf_nodes(:cols => [:id,:display_name,:type,:external_ref,:admin_op_status])
      nodes_to_start = nodes.reject{|n|n[:admin_op_status] == "running"}
      return ret if nodes_to_start.empty?

      state_change_mh = assembly.model_handle(:state_change)
      nodes_to_start.map do |node|
        hash = {
          :type => "power_on_node",
          :node => node
        }
        create_stub(state_change_mh,hash)
      end
    end
  end
end; end
-
1
module DTK
-
1
class StateChange
-
1
# Class-level factory methods (extended onto StateChange) for creating
# pending state-change items.
module CreateClassMixin
  # Creates one pending change item; the owning target comes from
  # opts[:target_idh] or is derived from the item's parent.
  def create_pending_change_item(new_item_hash,opts={})
    tgt_idh = opts[:target_idh] || Create.target_idh(new_item_hash[:parent])
    created = Create.new(tgt_idh).pending_change_items([new_item_hash],opts)
    created.first
  end

  # if pending change object exists, it returns it and updates its status to 'pending' if needed
  # otherwise it creates a new one and returns it
  def create_pending_change_item?(new_item_hash,opts={})
    tgt_idh = opts[:target_idh] || Create.target_idh(new_item_hash[:parent])
    Create.new(tgt_idh).pending_change_item?(new_item_hash,opts)
  end

  # assumption is that all items belong to same target
  def create_pending_change_items(new_item_hashes,opts={})
    return [] if new_item_hashes.empty?
    tgt_idh = opts[:target_idh] || Create.target_idh(new_item_hashes.first[:parent])
    Create.new(tgt_idh).pending_change_items(new_item_hashes,opts)
  end

  # TODO ### may deprecate below
  def create_converge_state_changes(node_idhs)
    return if node_idhs.empty?
    tgt_idh = Create.target_idh(node_idhs.first)
    Create.new(tgt_idh).converge_state_changes(node_idhs)
  end
end
-
-
1
# Builds and persists pending state-change rows for a single target.
class Create
  def initialize(target_idh)
    @target_idh = target_idh
    @target_id = target_idh.get_id()
  end

  # The target id handle that owns parent_idh.
  def self.target_idh(parent_idh)
    parent_idh.get_top_container_id_handle(:target)
  end

  # Returns the existing pending-change row matching new_item_hash (resetting
  # its status to 'pending' if needed), or creates and returns a new one.
  def pending_change_item?(new_item_hash,opts={})
    create_row = change_item_create_row(new_item_hash,opts)
    cols = ([:id,:group_id,:display_name,:status,:node_id,:component_id] + (opts[:returning_sql_cols]||[])).uniq
    sp_hash = {
      :cols => cols,
      :filter => [:and,
                  [:eq,:ref,create_row[:ref]],
                  [:eq,:datacenter_datacenter_id,create_row[:datacenter_datacenter_id]]]
    }

    model_handle = @target_idh.createMH(:model_name => :state_change, :parent_model_name => :target)
    if ret = Model.get_obj(model_handle,sp_hash)
      unless ret[:status] == 'pending'
        ret[:status] = 'pending'
        ret.update(:status => 'pending')
      end
      ret
    else
      opts_create = {:convert => true}.merge(Aux.hash_subset(opts,:returning_sql_cols))
      Model.create_from_row(model_handle,create_row,opts_create).create_object()
    end
  end

  # Bulk-creates pending-change rows for all given items.
  def pending_change_items(new_item_hashes,opts={})
    create_rows = new_item_hashes.map{|item|change_item_create_row(item,opts)}
    model_handle = @target_idh.createMH(:model_name => :state_change, :parent_model_name => :target)
    opts_create = {:convert => true}.merge(Aux.hash_subset(opts,:returning_sql_cols))
    Model.create_from_rows(model_handle,create_rows,opts_create)
  end

  # Builds the row hash used to persist a pending change for item
  # ({:new_item => idh, :parent => idh, :type => ..., :change => ...}).
  def change_item_create_row(item,opts={})
    new_item = item[:new_item]
    model_name = new_item[:model_name]
    parent = item[:parent]
    object_id_col = "#{model_name}_id".to_sym

    ret = {
      :ref => ref(model_name,item),
      :display_name => display_name(model_name,item),
      :status => "pending",
      :type => item[:type] || type(model_name),
      :object_type => model_name.to_s,
      object_id_col => new_item.get_id(),
      :datacenter_datacenter_id => @target_id
    }
    # link nested changes to their parent state change
    if parent[:model_name] == :state_change
      ret.merge!(:state_change_id => parent.get_id())
    end
    ret.merge!(:change => item[:change]) if item[:change]
    ret.merge!(:change_paths => item[:change_paths]) if item[:change_paths]
    ret
  end

  # Display name such as "install-component(foo)".
  # NOTE(review): raises NoMethodError (nil + String) for model names other
  # than :attribute/:component/:node — confirm callers never pass others.
  def display_name(model_name,item)
    display_name_prefix =
      case model_name
      when :attribute then "setting-attribute"
      when :component then "install-component"
      when :node then "create-node"
      end
    item_display_name = item[:new_item].get_field?(:display_name)
    display_name_prefix + (item_display_name ? "(#{item_display_name})" : "")
  end

  # Unique ref for the pending change; model_name is unused but kept for
  # signature symmetry with display_name/type.
  def ref(model_name,item)
    object_id = item[:new_item].get_id().to_s
    parent_id = item[:parent].get_id().to_s
    "#{RefPrefix}#{parent_id}--#{object_id}"
  end
  RefPrefix = "state_change"

  # Default state-change type for the given model name.
  def type(model_name)
    case model_name
    when :attribute then "setting"
    when :component then "install_component"
    when :node then "create_node"
    # fixed: previously interpolated undefined 'object_model_name' (NameError)
    else raise Error::NotImplemented.new("when object type is #{model_name}")
    end
  end

  # TODO ### may deprecate below
  public
  # NOTE(review): this class method ends by calling the *instance* method
  # pending_change_items, which raises NoMethodError if ever invoked; left
  # as-is since the section is marked for deprecation.
  def self.converge_state_changes(node_idhs)
    sample_idh = node_idhs.first()
    sp_hash = {
      :cols => [:id,:datacenter_datacenter_id,:components]
    }
    new_item_hashes = Model.get_objs_in_set(node_idhs,sp_hash).map do |r|
      {
        :new_item => r[:component].id_handle(),
        :parent => sample_idh.createIDH(:model_name => :datacenter, :id=> r[:datacenter_datacenter_id]),
        :type => "converge_component"
      }
    end
    pending_change_items(new_item_hashes)
  end
end
-
end
-
end
-
# TODO: this file's name is somewhat of a misnomer; it covers both pending changes and converging a 'region' such as an assembly, node group, or target.
-
2
module DTK; class StateChange
  # Class-level helpers (extended onto StateChange) for querying the tree of
  # pending state changes underneath a target, level by level.
  module GetPendingChangesClassMixin
    # Map of node_id => {:state => :changes, :detail => {}} for every node
    # that has at least one pending change under target_idh.
    def get_ndx_node_config_changes(target_idh)
      # TODO: there is probably more efficient info to get; this provides too much
      changes = flat_list_pending_changes(target_idh)
      # TODO: stub
      changes.inject({}) do |h,r|
        node_id = r[:node][:id]
        h.merge(node_id => {:state => :changes, :detail => {}})
      end
    end

    # Marker value used for nodes with no pending changes.
    def node_config_change__no_changes()
      {:state => :no_changes}
    end

    # Breadth-first walk of pending changes: starts at the target, then
    # repeatedly expands the children of the previous level until exhausted;
    # duplicates are collapsed at the end.
    def flat_list_pending_changes(target_idh,opts={})
      target_mh = target_idh.createMH()
      last_level = pending_changes_one_level_raw(target_mh,[target_idh],opts)
      ret = Array.new
      state_change_mh = target_mh.create_childMH(:state_change)
      while not last_level.empty?
        ret += last_level
        last_level = pending_changes_one_level_raw(state_change_mh,last_level.map{|obj|obj.id_handle()},opts)
      end
      remove_dups_and_proc_related_components(ret)
    end

    # One level of pending changes directly under idh_list: node creates plus
    # component and attribute changes.
    def pending_changes_one_level_raw(parent_mh,idh_list,opts={})
      pending_create_node(parent_mh,idh_list,opts) +
        pending_changed_component(parent_mh,idh_list,opts) +
        pending_changed_attribute(parent_mh,idh_list,opts)
    end

    # Pending "create_node" changes under idh_list, de-duplicated per node;
    # node-group entries that have no pending children are dropped.
    def pending_create_node(parent_mh,idh_list,opts={})
      parent_field_name = DB.parent_field(parent_mh[:model_name],:state_change)
      filter =
        [
         :and,
         [:oneof, parent_field_name,idh_list.map{|idh|idh.get_id()}],
         [:eq, :type, "create_node"],
         [:eq, :status, "pending"]]
      filter += opts[:added_filters] if opts[:added_filters]

      sp_hash = {
        :filter => filter,
        :cols => [:id,:relative_order,:type,:created_node,parent_field_name,:state_change_id,:node_id].uniq
      }
      state_change_mh = parent_mh.createMH(:state_change)
      # using ndx_ret to remove duplicate pending changes for same node
      ndx_ret = Hash.new
      get_objs(state_change_mh,sp_hash).each do |r|
        node_id = r[:node][:id]
        ndx_ret[node_id] ||= r
      end
      pending_scs = ndx_ret.values

      # TODO: compensating for fact that a component a node group could have state pending, but
      # no changes under it
      node_group_scs = pending_scs.select{|sc|sc[:node].is_node_group?()}
      return pending_scs if node_group_scs.empty?
      sc_ids_to_remove = find_any_without_pending_children?(node_group_scs.map{|sc|sc.id_handle()})
      # remove any sc in pending_scs that is a node group without pending children
      return pending_scs if sc_ids_to_remove.empty? # shortcut
      pending_scs.reject{|sc|sc_ids_to_remove.include?(sc.id)}
    end

    # returns ids for all state changes that do not have pending children
    def find_any_without_pending_children?(sc_idhs)
      ret = Array.new
      return ret if sc_idhs.empty?
      ndx_found = sc_idhs.inject(Hash.new){|h,sc_idh|h.merge(sc_idh.get_id() => nil)} # initially setting everything to nil and flipping if found
      sp_hash = {
        :cols => [:state_change_id],
        :filter => [:and,
                    [:oneof, :state_change_id, ndx_found.keys],
                    [:eq, :type, "create_node"],
                    [:eq, :status, "pending"]]
      }

      sc_mh = sc_idhs.first.createMH()
      get_objs(sc_mh,sp_hash).each{|sc|ndx_found[sc[:state_change_id]] ||= true}
      ndx_found.each_pair{|sc_id,found|ret << sc_id unless found}
      ret
    end
    private :find_any_without_pending_children?

    # Pending component-level changes (install/update/converge) under
    # idh_list, augmented with components related via mixins.
    # NOTE(review): uses :columns here while other queries use :cols —
    # confirm get_objs honors both keys.
    def pending_changed_component(parent_mh,idh_list,opts={})
      parent_field_name = DB.parent_field(parent_mh[:model_name],:state_change)
      sp_hash = {
        :filter => [:and,
                    [:oneof, parent_field_name,idh_list.map{|idh|idh.get_id()}],
                    [:oneof, :type, ["install_component", "update_implementation","converge_component"]],
                    [:eq, :status, "pending"]],
        :columns => [:id, :relative_order,:type,:changed_component,parent_field_name,:state_change_id].uniq
      }
      state_change_mh = parent_mh.createMH(:state_change)
      sc_with_direct_cmps = get_objs(state_change_mh,sp_hash)
      add_related_components(sc_with_direct_cmps)
    end

    # Pending attribute-setting changes under idh_list, augmented with
    # components related via mixins.
    def pending_changed_attribute(parent_mh,idh_list,opts={})
      parent_field_name = DB.parent_field(parent_mh[:model_name],:state_change)
      sp_hash = {
        :filter => [:and,
                    [:oneof, parent_field_name,idh_list.map{|idh|idh.get_id()}],
                    [:eq, :type, "setting"],
                    [:eq, :status, "pending"]],
        :columns => [:id, :relative_order,:type,:changed_attribute,parent_field_name,:state_change_id].uniq
      }
      state_change_mh = parent_mh.createMH(:state_change)
      sc_with_direct_cmps = get_objs(state_change_mh,sp_hash)
      add_related_components(sc_with_direct_cmps)
    end

    # Appends state changes for components related via mixins; when a
    # component appears more than once, an "install_component" change wins
    # its index slot.
    def add_related_components(sc_with_direct_cmps)
      component_index = Hash.new
      sc_with_direct_cmps.each do |sc|
        cmp_id = sc[:component][:id]
        unless component_index[cmp_id]
          component_index[cmp_id] = sc
        else
          if sc[:type] == "install_component"
            component_index[cmp_id] = sc
          end
        end
      end

      cols = [:id,:display_name,:basic_type,:external_ref,:node_node_id,:only_one_per_node,:extended_base,:implementation_id]
      cmps_in_sc = component_index.values.map{|sc|sc[:component]}
      related_cmps = Component.get_component_instances_related_by_mixins(cmps_in_sc,cols)
      # TODO: assumption that cmps only appear once in sc_with_direct_cmps

      sc_with_related_cmps = Array.new
      related_cmps.map do |cmp|
        cmp[:assoc_component_ids].each do |cmp_id|
          related_sc = component_index[cmp_id].merge(:component => cmp)
          sc_with_related_cmps << related_sc
        end
      end

      sc_with_direct_cmps + sc_with_related_cmps
    end

    # Collapses duplicate changes (per node for create_node, per component
    # otherwise) and records the contributing state-change ids as :linked_ids.
    def remove_dups_and_proc_related_components(state_changes)
      indexed_ret = Hash.new
      # remove duplicates wrt component and process linked_ids
      state_changes.each do |sc|
        if sc[:type] == "create_node"
          indexed_ret[sc[:node][:id]] = augment_with_linked_id(sc,sc[:id])
          # TODO: ordering may do this anyway, but do we explicitly want to make sure if both setting and install use install as type
        elsif ["setting","install_component","update_implementation","converge_component"].include?(sc[:type])
          indexed_ret[sc[:component][:id]] = augment_with_linked_id(indexed_ret[sc[:component][:id]] || sc.reject{|k,v|[:attribute].include?(k)},sc[:id])
        else
          Log.error("unexpected type #{sc[:type]}; ignoring")
        end
      end
      indexed_ret.values
    end

    private
    # linked ids is link to relevant state_change objects
    def augment_with_linked_id(state_change,id)
      if linked = state_change[:linked_ids]
        linked.include?(id) ? state_change : state_change.merge(:linked_ids => linked + [id])
      else
        state_change.merge(:linked_ids => [id])
      end
    end
  end
end; end
-
-
2
module DTK; class StateChange
  # Node-centric state changes: pending changes computed per node, with
  # subclasses scoping to all matching nodes, a single node, or a single
  # node group.
  class NodeCentric < self
    # Pending 'create_node' changes under target_idh, restricted by the
    # subclass-specific node filter; returns one single-element group per
    # change (grouped by node id, each of which is unique).
    def self.node_state_changes(target_idh,opts)
      ret = Array.new
      unless added_sc_filter = ret_node_sc_filter(target_idh,opts)
        return ret
      end
      target_mh = target_idh.createMH()
      last_level = pending_create_node(target_mh,[target_idh],:added_filters => [added_sc_filter])
      state_change_mh = target_mh.create_childMH(:state_change)
      while not last_level.empty?
        ret += last_level
        last_level = pending_create_node(state_change_mh,last_level.map{|obj|obj.id_handle()},:added_filters => [added_sc_filter])
      end
      ## group by node id (and using fact that each will be unique id)
      ret.map{|ch|[ch]}
    end

    # Builds 'converge_component' state-change stubs for the node-centric
    # components on each node and on any node group containing it; returns an
    # array of per-node arrays.
    def self.component_state_changes(mh,opts)
      ret = Array.new
      # find nodes and node_to_ng mapping
      nodes,node_to_ng = get_nodes_and_node_to_ng_index(mh,opts)
      if nodes.empty?
        return ret
      end

      # find components associated with each node or node group
      ndx_cmps = Hash.new

      sp_hash = {
        :cols => [:id,:display_name,:node_centric_components],
        :filter => [:oneof, :id, ret_node_group_ids(node_to_ng) + nodes.map{|n|n[:id]}]
      }
      rows = get_objs(mh.createMH(:node),sp_hash)
      if rows.empty?
        return ret
      end

      rows.each do |row|
        (ndx_cmps[row[:id]] ||= Array.new) << row[:component]
      end

      # compute state changes
      state_change_mh = mh.createMH(:state_change)
      nodes.each do |node|
        node_cmps = Array.new
        node_id = node[:id]
        ng_ids = (node_to_ng[node_id]||{}).keys
        ([node_id] + ng_ids).each do |node_or_ng_id|
          (ndx_cmps[node_or_ng_id]||[]).each do |cmp|
            hash = {
              :type => "converge_component",
              :component => cmp,
              :node => node,
            }
            node_cmps << create_stub(state_change_mh,hash)
          end
        end
        ret << node_cmps
      end
      ret
    end

    class << self
      private
      # All node-group ids appearing anywhere in the node_to_ng index.
      def ret_node_group_ids(node_to_ng)
        ng_ndx = Hash.new
        node_to_ng.each_value{|h|h.each{|ng_id,ng|ng_ndx[ng_id] = true}}
        ng_ndx.keys
      end
    end

    # for components finds all components associated with given nodes or a node group they belong to
    class AllMatching < self
      private
      # returns [nodes, node_to_ng]
      # can be overwritten
      def self.get_nodes_and_node_to_ng_index(mh,opts)
        unless nodes = opts[:nodes]
          raise Error.new("Expecting opts[:nodes]")
        end
        node_filter = opts[:node_filter] || DTK::Node::Filter::NodeList.new(nodes.map{|n|n.id_handle()})
        node_to_ng = DTK::NodeGroup.get_node_groups_containing_nodes(mh,node_filter)
        [nodes,node_to_ng]
      end

    end

    class SingleNode < AllMatching
      private
      # returns [nodes, node_to_ng]
      # can be overwritten
      def self.get_nodes_and_node_to_ng_index(mh,opts)
        unless node = opts[:node]
          # fixed: message previously said "Expecting opts[:nodes]" though
          # the checked key is :node
          raise Error.new("Expecting opts[:node]")
        end
        super(mh,:nodes => [node])
      end

      # Filter matching the single node's id.
      def self.ret_node_sc_filter(target_idh,opts)
        unless node = opts[:node]
          raise Error.new("Expecting opts[:node]")
        end
        [:eq, :node_id, node[:id]]
      end
    end

    class SingleNodeGroup < self
      private
      # Filter matching the group's member nodes; false when the group is empty.
      def self.ret_node_sc_filter(target_idh,opts)
        unless node_group = opts[:node_group]
          raise Error.new("Expecting opts[:node_group]")
        end
        nodes = node_group.get_node_group_members()
        (!nodes.empty?) && [:oneof, :node_id, nodes.map{|r|r[:id]}]
      end

      # returns [nodes, node_to_ng]
      # this is for finding node - ng relation given a specific ng
      def self.get_nodes_and_node_to_ng_index(mh,opts)
        unless node_group = opts[:node_group]
          raise Error.new("Expecting opts[:node_group]")
        end
        nodes = node_group.get_node_group_members()
        ng_id = node_group[:id]
        node_to_ng = nodes.inject(Hash.new) do |h,n|
          h.merge(n[:id] => {ng_id => true})
        end
        [nodes,node_to_ng]
      end
    end
  end
end; end
-
1
module DTK
-
1
class Target < Model
-
1
r8_nested_require('target','clone')
-
1
r8_nested_require('target','install_agents_helper')
-
1
r8_nested_require('target','iaas_properties')
-
1
r8_nested_require('target','instance')
-
1
r8_nested_require('target','template')
-
1
include Clone::Mixin
-
-
1
def model_name() #TODO: remove temp datacenter->target
-
:datacenter
-
end
-
##
-
1
def self.common_columns()
-
[
-
1
:id,
-
:display_name,
-
:name,
-
:description,
-
:type,
-
:iaas_type,
-
:iaas_properties,
-
:project_id,
-
:is_default_target,
-
:provider,
-
:ui
-
]
-
end
-
-
1
def self.name_to_id(model_handle,name)
-
filter = [:and, [:eq, :display_name, name], object_type_filter()]
-
name_to_id_helper(model_handle,name,:filter => filter)
-
end
-
-
1
def self.check_valid_id(model_handle,id)
-
filter = [:and, [:eq, :id, id], object_type_filter()]
-
check_valid_id_helper(model_handle,id,filter)
-
end
-
-
1
def name()
-
get_field?(:display_name)
-
end
-
-
1
def type()
-
get_field?(:type)
-
end
-
-
1
def is_default?()
-
get_field?(:is_default_target)
-
end
-
-
1
def info_about(about, opts={})
-
case about
-
when :assemblies
-
opts.merge!(:target_idh => id_handle())
-
Assembly::Instance.list(model_handle(:component), opts)
-
when :nodes
-
Node::TargetRef.list(self)
-
else
-
raise Error.new("TODO: not implemented yet: processing of info_about(#{about})")
-
end
-
end
-
-
1
def self.check_valid_id(model_handle,id)
-
check_valid_id_helper(model_handle,id,[:eq, :id, id])
-
end
-
-
1
def update_ui_for_new_item(new_item_id)
-
update_obj!(:ui)
-
target_ui = self[:ui]||{:items=>{}}
-
target_ui[:items][new_item_id.to_s.to_sym] = {}
-
update(:ui=>target_ui)
-
end
-
-
1
def get_ports(*types)
-
port_list = get_objs(:cols => [:node_ports]).map do |r|
-
component_id = (r[:link_def]||{})[:component_component_id]
-
component_id ? r[:port].merge(:component_id => component_id) : r[:port]
-
end
-
i18n = get_i18n_mappings_for_models(:component,:attribute)
-
port_list.map{|port|port.filter_and_process!(i18n,*types)}.compact
-
end
-
-
1
def get_node_group_members()
-
get_objs(:cols => [:node_members]).map{|r|r[:node_member]}
-
end
-
-
1
def get_project()
-
project_id = get_field?(:project_id)
-
id_handle(:id => project_id,:model_name => :project).create_object()
-
end
-
-
1
def get_node_config_changes()
-
nodes = get_objs(:cols => [:nodes]).map{|r|r[:node]}
-
ndx_changes = StateChange.get_ndx_node_config_changes(id_handle)
-
nodes.inject({}){|h,n|h.merge(n.id => ndx_changes[n.id]||StateChange.node_config_change__no_changes())}
-
end
-
-
1
def install_agents()
-
InstallAgentsHelper.install(self)
-
end
-
-
### TODO these should be moved to IAAS-spefic location
-
1
def get_iaas_type()
-
get_field?(:iaas_type)
-
end
-
-
1
def get_security_group()
-
get_iaas_properties()[:security_group]
-
end
-
-
1
def get_region()
-
get_iaas_properties()[:region]
-
end
-
-
1
def get_keypair()
-
get_iaas_properties()[:keypair]
-
end
-
-
1
def get_security_group_set()
-
get_iaas_properties()[:security_group_set]
-
end
-
-
# returns aws params if pressent in iaas properties
-
1
def get_aws_compute_params()
-
@iaas_props ||= get_iaas_properties()
-
if @iaas_props && (aws_key = @iaas_props[:key]) && (aws_secret = @iaas_props[:secret])
-
ret = { :aws_access_key_id => aws_key, :aws_secret_access_key => aws_secret }
-
if region = @iaas_props[:region]
-
ret.merge!(:region => region)
-
end
-
ret
-
end
-
end
-
-
### TODO end: these should be moved to IAAS-spefic location
-
-
1
# Returns this target's iaas_properties merged over its parent provider's
# (when :parent_id is set); the target's own entries take precedence over
# the parent's.
def get_iaas_properties()
  update_object!(:iaas_properties,:parent_id)
  iaas_properties = self[:iaas_properties]
  if parent_id = self[:parent_id]
    parent_provider = id_handle(:id => parent_id).create_object(:model_name => :target_instance)
    if parent_iaas_properties = parent_provider.get_field?(:iaas_properties)
      # specific properties take precedence over the parent's
      iaas_properties = parent_iaas_properties.merge(iaas_properties||{})
    end
  end
  iaas_properties
end
-
-
1
# Refreshes and returns the status of every node: {node_id => status}.
def get_and_update_nodes_status()
  get_objs(:cols => [:nodes]).each_with_object({}) do |row, ret|
    node = row[:node]
    ret[node.id] = node.get_and_update_status!()
  end
end
-
-
1
# Tears down and deletes every node under this target.
def destroy_and_delete_nodes()
  nodes = get_objs(:cols => [:nodes]).map { |row| row[:node] }
  nodes.each { |node| node.destroy_and_delete() }
end
-
-
1
# Returns violation hashes for this target, each annotated with the
# display name of the node it applies to; when severity is given, only
# violations of that severity are included.
def get_violation_info(severity=nil)
  get_objs(:columns => [:violation_info]).each_with_object([]) do |row, ret|
    violation = row[:violation]
    next unless severity.nil? or violation[:severity] == severity
    node_name = (row[:node]||{})[:display_name]
    ret << violation.merge(:target_node_display_name => node_name)
  end
end
-
-
1
# Clones the object referenced by source_id_handle into self, applying
# any override_attrs; returns the new item's id, or nil when the clone
# did not produce an object.
def add_item(source_id_handle,override_attrs={})
  # TODO: need to copy in avatar when hash["ui"] is non null
  override_attrs ||= {}
  source_obj = source_id_handle.create_object()
  new_obj = clone_into(source_obj, override_attrs, source_obj.source_clone_info_opts())
  new_obj && new_obj.id()
end
-
-
1
private
-
1
# Model names of the child items that live under a target (used by the
# generic sub-item machinery).
def sub_item_model_names()
  [:node]
end
-
end
-
1
# Temporary backwards-compatibility alias for callers still using Datacenter.
Datacenter = Target #TODO: remove temp datacenter->target
-
end
-
-
1
module DTK
-
1
class Target
-
1
module Clone
-
1
r8_nested_require('clone','special_node_attributes')
-
-
1
module Mixin
-
1
# Hook invoked after a clone-copy into this target completes; dispatches
# on the cloned model's type to record the appropriate pending state
# changes.
def clone_post_copy_hook(clone_copy_output,opts={})
  case clone_copy_output.model_name()
  when :component
    Clone.component(self,clone_copy_output,opts)
  when :node
    Clone.node(self,clone_copy_output,opts)
  else #TODO: catchall that will be expanded
    new_id_handle = clone_copy_output.id_handles.first
    StateChange.create_pending_change_item(:new_item => new_id_handle, :parent => id_handle())
  end
end
-
end
-
-
1
# Post-clone processing when a node was cloned into target: fills in any
# external_ref values the node lacks from the target's iaas_properties
# (target values never overwrite existing node values), persists the
# updated external_ref, then records a pending 'create' state change.
def self.node(target,clone_copy_output,opts)
  target.update_object!(:iaas_type,:iaas_properties)
  new_id_handle = clone_copy_output.id_handles.first
  # add external ref values from target to node if node does not have them
  # assuming passed already check whether node consistent requirements with target
  # TODO: not handling yet constraint form where set of possibilities given
  node = clone_copy_output.objects.first
  node_ext_ref = node[:external_ref]
  target[:iaas_properties].each do |k,v|
    unless node_ext_ref.has_key?(k)
      node_ext_ref[k] = v
    end
  end
  node.update(:external_ref => node_ext_ref)
  StateChange.create_pending_change_item(:new_item => new_id_handle, :parent => target.id_handle())
end
-
-
1
# Post-clone processing for a component copied into a target. Only
# assembly components are supported; anything else raises.
def self.component(target,clone_copy_output,opts)
  unless assembly = clone_copy_output.assembly?(:subclass_object=>true)
    raise Error.new("Not implemented clone of non assembly component to target")
  end
  assembly(target,assembly,clone_copy_output,opts)
end
-
-
1
private
-
1
# Post-clone processing for an assembly cloned into a target. The clone
# output tree has the form: assembly - node - component. Fixes up port
# link-def/component ids, materializes special node attributes, creates
# target refs and 'create node' state changes, wires attribute links,
# applies optional service settings, locks module refs (annotating the
# error with the include tree on failure), and finally records pending
# state changes for the cloned components.
def self.assembly(target,assembly,clone_copy_output,opts)
  #clone_copy_output will be of form: assembly - node - component

  #adjust link_def_id on ports
  set_ports_link_def_and_cmp_ids(clone_copy_output)

  #for port links that get generated by add on service
  #TODO: currently not used; may deprecate create_add_on_port_and_attr_links?(target,clone_copy_output,opts)

  level = 1
  nodes = clone_copy_output.children_objects(level,:node,:cols=>[:display_name,:external_ref,:type])
  return if nodes.empty?
  SpecialNodeAttributes.process!(nodes)

  # The method create_target_refs_and_links?
  # - creates if needed target refs and links to them
  # - moves node attributes to the target refs
  # - returns any needed 'create node' state change objects, which designate that
  #   target ref node needs to be created as opposed to it exists already
  nodes_for_create_sc = Node::TargetRef::Clone.new(target,assembly,nodes).create_target_refs_and_links?()
  create_state_changes_for_create_node?(target,nodes_for_create_sc)

  # Computing port_links (and also attribute links) after create_target_refs_and_links
  # because relying on the node attributes to be shifted to target refs if connected to target refs
  port_link_idhs = clone_copy_output.children_id_handles(level,:port_link)
  create_attribute_links__clone_if_needed(target,port_link_idhs)

  if settings = opts[:service_settings]
    settings.apply_settings(target,assembly)
  end

  begin
    ModuleRefs::Lock.compute(assembly,:raise_errors => true).persist()
  rescue ModuleRef::Missing::Error => e
    # enrich the error with a YAML rendering of the module include tree
    includes = ModuleRefs::Tree.create(assembly).hash_form()
    str_includes = keys_to_string(includes)
    e.message << "\n#{str_includes.to_yaml}"
    raise e
  end

  level = 2
  component_child_hashes = clone_copy_output.children_hash_form(level,:component)
  return if component_child_hashes.empty?
  component_new_items = component_child_hashes.map do |child_hash|
    {:new_item => child_hash[:id_handle], :parent => target.id_handle()}
  end

  StateChange.create_pending_change_items(component_new_items)
end
-
-
1
# Recursively converts all keys of hash to strings (nested hashes are
# converted too; values are otherwise left as-is). Returns a new hash and
# does not mutate the input.
# Fix: the original iterated with `map` purely for side effects, which
# allocates a throwaway array; `each` is the correct idiom. The redundant
# explicit `return` is also dropped.
def self.keys_to_string(hash)
  new_h = {}
  hash.each do |key,value|
    value = keys_to_string(value) if value.is_a?(Hash)
    new_h[key.to_s] = value
  end
  new_h
end
-
-
1
# Creates 'create node' pending state changes under target for the given
# nodes, skipping physical nodes (their machines already exist). Also
# creates member-level state changes for any node groups. Returns nil.
def self.create_state_changes_for_create_node?(target,nodes)
  # Do not create stages for nodes that are physical
  pruned_nodes = nodes.reject do |node|
    (node.get_field?(:external_ref)||{})[:type] == 'physical'
  end
  return if pruned_nodes.empty?

  target_idh = target.id_handle()
  node_new_items = pruned_nodes.map{|node|{:new_item => node.id_handle(), :parent => target_idh}}
  sc_hashes = create_state_change_objects(target_idh,node_new_items)
  create_state_changes_for_node_group_members(target_idh,pruned_nodes,sc_hashes)
  nil
end
-
-
1
# For each node group among nodes, creates 'create node' state changes
# for its member nodes, parented under the group's own state change.
# target_idh - id handle of the target being populated
# nodes      - cloned nodes; only node groups are processed
# sc_hashes  - state-change rows already created for the nodes, used to
#              index node_id -> state change id
def self.create_state_changes_for_node_group_members(target_idh,nodes,sc_hashes)
  ret = Array.new
  node_groups = nodes.select{|n|n.is_node_group?()}
  return ret if node_groups.empty?
  ng_mh = node_groups.first.model_handle() # NOTE(review): unused; kept in case model_handle has side effects
  ndx_sc_ids = sc_hashes.inject(Hash.new){|h,sc|h.merge(sc[:node_id] => sc[:id])}
  sc_mh = target_idh.createMH(:state_change)
  new_items_hash = Array.new
  ServiceNodeGroup.get_ndx_node_group_members(node_groups.map{|ng|ng.id_handle()}).each do |ng_id,node_members|
    unless ng_state_change_id = ndx_sc_ids[ng_id]
      # BUGFIX: was `Log.eror(...)`, which would itself raise NoMethodError
      # on this error path; message also referenced a misspelled variable.
      Log.error("Unexpected that ndx_sc_ids[ng_id] is null")
      next
    end
    ng_state_change_idh = sc_mh.createIDH(:id => ng_state_change_id)
    node_members.each do |node|
      new_items_hash << {:new_item => node.id_handle(), :parent => ng_state_change_idh}
    end
  end
  create_state_change_objects(target_idh,new_items_hash)
end
-
-
1
# Bulk-creates pending state change items for new_items_hash under
# target_idh, returning rows with [:id,:display_name,:group_id,:node_id].
def self.create_state_change_objects(target_idh,new_items_hash)
  opts_sc = {:target_idh => target_idh,:returning_sql_cols => [:id,:display_name,:group_id,:node_id]}
  StateChange.create_pending_change_items(new_items_hash,opts_sc)
end
-
-
1
# For service add-ons: creates port and attribute links for any port-link
# hashes the add-on proc matches in the target. No-op when opts carries
# no :service_add_on_proc or when nothing matches.
def self.create_add_on_port_and_attr_links?(target,clone_copy_output,opts)
  sao_proc = opts[:service_add_on_proc]
  pl_hashes = sao_proc && sao_proc.get_matching_ports_link_hashes_in_target(clone_copy_output.id_handles.first)
  return if pl_hashes.nil? or pl_hashes.empty?
  # TODO: more efficient if had bulk create; also may consider better integrating with creation of the assembly proper's port links
  target_idh = target.id_handle()
  pl_hashes.each do |port_link_hash|
    PortLink.create_port_and_attr_links__clone_if_needed(target_idh,port_link_hash,opts)
  end
end
-
-
1
# Resolves link_def and component ids on the cloned assembly's ports and
# writes them via Port.set_ports_link_def_and_cmp_ids. No-op when the
# clone produced no ports.
def self.set_ports_link_def_and_cmp_ids(clone_copy_output)
  obj_infos = lambda do |level, type|
    clone_copy_output.children_hash_form(level, type).map { |r| r[:obj_info] }
  end
  ports = obj_infos.call(2, :port)
  return if ports.empty?
  port_mh = clone_copy_output.children_id_handles(2,:port).first.createMH()
  cmps = obj_infos.call(2, :component)
  link_defs = obj_infos.call(3, :link_def)
  Port.set_ports_link_def_and_cmp_ids(port_mh, ports, cmps, link_defs)
end
-
-
# find the port_links under the assembly and then add attribute_links associated with it
-
1
# Finds the port_links under the assembly (by the given id handles) and
# creates the attribute links associated with each of them.
def self.create_attribute_links__clone_if_needed(target,port_link_idhs)
  # TODO: this may be considered bug; but at this point assembly_id on port_links point to assembly library instance
  return if port_link_idhs.empty?
  sample_pl_idh = port_link_idhs.first
  port_link_mh = sample_pl_idh.createMH()
  sp_hash = {
    :cols => [:id,:display_name,:group_id,:input_id,:output_id],
    :filter => [:oneof,:id, port_link_idhs.map{|pl_idh|pl_idh.get_id()}]
  }
  Model.get_objs(port_link_mh,sp_hash).each do |port_link|
    port_link.create_attribute_links__clone_if_needed(target.id_handle,:set_port_link_temporal_order=>true)
  end
end
-
end
-
end
-
end
-
1
module DTK
-
1
class Target
-
1
module Clone
-
1
module SpecialNodeAttributes
-
1
# Entry point: materializes the special node attributes (:name and
# :cardinality) on the given cloned nodes.
def self.process!(nodes)
  process_name_attribute!(nodes)
  process_cardinality_attribute!(nodes)
end
-
1
private
-
1
# Creates/sets a hidden :name attribute on each node, valued with the
# node's display_name.
def self.process_name_attribute!(nodes)
  hidden_fields = {:hidden => true}
  nodes.each do |node|
    display_name = node.get_field?(:display_name)
    Node::NodeAttribute.create_or_set_attributes?([node],:name,display_name,hidden_fields)
  end
end
-
-
1
# Sets a :cardinality attribute on node groups (valued with the number of
# target refs to link), batched by cardinality value, then caches the
# attribute values on all nodes.
def self.process_cardinality_attribute!(nodes)
  # first set cardinality on node groups
  ndx_cardinality = Hash.new
  nodes.each do |n|
    if n.is_node_group?()
      # card = (target_refs_to_link && its size): falsy when the field is absent
      if card = n[:target_refs_to_link] && n[:target_refs_to_link].size
        (ndx_cardinality[card] ||= Array.new) << n
      end
    end
  end
  ndx_cardinality.each_pair do |card,nodes_to_set_card|
    Node::NodeAttribute.create_or_set_attributes?(nodes_to_set_card,:cardinality,card)
  end
  Node.cache_attribute_values!(nodes,:cardinality)
end
-
-
end
-
end
-
end
-
end
-
1
module DTK
-
1
class Target
-
1
class IAASProperties
-
1
r8_nested_require('iaas_properties','ec2')
-
1
attr_reader :name
-
# IAASProperties.new will be called with
-
# :name and :iaas_properties, or with
-
# :target_instance
-
1
# hash_args: either {:name, :iaas_properties} or {:target_instance}
# (missing keys leave the corresponding ivar nil).
def initialize(hash_args)
  @name, @iaas_properties, @target_instance =
    hash_args.values_at(:name, :iaas_properties, :target_instance)
end
-
-
1
# Public accessor for the (possibly lazily fetched) iaas properties hash.
def properties()
  iaas_properties()
end
-
-
1
# Prepares iaas_properties for display, per iaas type: strips keys not
# safe to show and normalizes display fields. Mutates iaas_properties.
# No-op when type or iaas_properties is nil, or the type has no handler.
def self.sanitize_and_modify_for_print_form!(type,iaas_properties)
  return if type.nil? or iaas_properties.nil?
  if type.to_sym == :ec2
    Ec2.sanitize!(iaas_properties)
    Ec2.modify_for_print_form!(iaas_properties)
  end
end
-
-
1
# Returns a more specific iaas type symbol when the properties allow it
# (currently only ec2 -> :ec2_vpc); nil otherwise.
def self.more_specific_type?(type,iaas_properties)
  return if type.nil? or iaas_properties.nil?
  Ec2.more_specific_type?(iaas_properties) if type.to_sym == :ec2
end
-
-
1
# Validates iaas properties of the given type via CommandAndControl;
# raises on problems.
def self.check(iaas_type,iaas_properties,opts={})
  CommandAndControl.check_iaas_properties(iaas_type,iaas_properties,opts)
end

# NOTE(review): shadows Object#hash and returns a Hash, not an Integer —
# instances must not be used as Hash keys while this is the case.
def hash()
  iaas_properties()
end

# The target instance's iaas type as a symbol; logs an error and returns
# nil when the field is unset.
def type()
  unless ret = @target_instance.get_field?(:iaas_type)
    Log.error("Expected that :iaas_type has a value")
  end
  ret && ret.to_sym
end

# True only for iaas types that support creating machine images.
def supports_create_image?()
  [:ec2].include?(type())
end

# Memoized: properties supplied at construction, else fetched from the
# target instance; {} when neither is available.
def iaas_properties()
  @iaas_properties ||= (@target_instance && @target_instance.get_field?(:iaas_properties))||{}
end

# NOTE(review): declared as a class method yet calls the instance method
# type() — as written this raises NoMethodError when invoked; it was
# likely intended as an instance method. Left unchanged pending an audit
# of callers (changing it would alter the public interface).
def self.equal?(i2)
  case type()
  when :ec2 then Ec2.equal?(i2)
  else raise Error.new("Unexpected iaas_properties type (#{type})")
  end
end
-
end
-
end
-
end
-
-
2
module DTK; class Target
-
1
class IAASProperties
-
1
class Ec2 < self
-
1
# hash_args is forwarded to IAASProperties#initialize. When a provider is
# given, its iaas_properties are cached minus credentials for use when
# cloning targets.
def initialize(hash_args,provider=nil)
  super(hash_args)
  if provider
    @provider = provider
    # sanitizing what goes in provider_iaas_props, which is used for cloning targets
    @provider_iaas_props = (provider.get_field?(:iaas_properties)||{}).reject{|k,v|[:key,:secret].include?(k)}
  end
end
-
-
# returns an array of IAASProperties::Ec2 objects
-
1
# Returns an array of IAASProperties::Ec2 objects: one for the region
# target, plus — for ec2 types needing availability-zone targets — one
# per availability zone reported by the provider. Validation errors raise
# from create_target_propeties.
def self.check_and_compute_needed_iaas_properties(target_name,ec2_type,provider,property_hash)
  ret = Array.new
  iaas_property_factory = new({:name => target_name},provider)
  ret << iaas_property_factory.create_target_propeties(ec2_type,property_hash)
  region = property_hash[:region]
  if Ec2TypesNeedingAZTargets.include?(ec2_type)
    # TODO: when have nested targets will nest availability zone targets in the one just associated with region
    # add iaas_properties for targets created separately for every availability zone
    provider.get_availability_zones(region).each do |az|
      ret << iaas_property_factory.create_target_propeties(ec2_type,property_hash,:availability_zone => az)
    end
  end
  ret
end
-
1
Ec2TypesNeedingAZTargets = [:ec2_classic]
-
-
-
1
# Builds a new Ec2 IAASProperties for a target of the given ec2_type,
# optionally specialized for an availability zone via
# params[:availability_zone] (which also suffixes the target name).
# NOTE(review): method name misspells 'properties'; kept because callers
# use this exact name.
def create_target_propeties(ec2_type,target_property_hash,params={})
  iaas_properties = clone_and_check_manditory_params(target_property_hash)
  iaas_properties = {:ec2_type => ec2_type}.merge(iaas_properties)
  name = name()
  if az = params[:availability_zone]
    name = availbility_zone_target_name(name,az)
    iaas_properties = {:availability_zone => az}.merge(iaas_properties)
  end
  self.class.new(:name => name,:iaas_properties => iaas_properties)
end
-
-
1
# NOTE(review): declared as a class method but references the instance
# methods iaas_properties/type — cannot work as written; likely meant to
# be an instance method (regions compared for equality). Left unchanged
# pending an audit of callers.
def self.equal?(i2)
  i2.type == :ec2 and
  iaas_properties[:region] == i2.iaas_properties[:region]
end
-
-
1
private
-
-
1
# Builds the per-availability-zone target name, e.g. "mytarget-us-east-1a".
def availbility_zone_target_name(name,availbility_zone)
  [name, availbility_zone].join('-')
end
-
-
1
# Returns target_property_hash, possibly augmented with :keypair and a
# security group inherited from the parent provider when the target does
# not supply them; raises ErrorUsage when neither target nor provider has
# them. Also runs the ec2 property check against the provider's
# credentialed properties merged with the target's.
def clone_and_check_manditory_params(target_property_hash)
  ret = target_property_hash
  unless target_property_hash[:keypair]
    if keypair = @provider_iaas_props[:keypair]
      ret = ret.merge(:keypair => keypair)
    else
      raise ErrorUsage.new("The target and its parent provider are both missing a keypair")
    end
  end

  unless target_property_hash[:security_group] or target_property_hash[:security_group_set]
    if security_group = @provider_iaas_props[:security_group]
      ret = ret.merge(:security_group => security_group)
    elsif security_group_set = @provider_iaas_props[:security_group_set]
      ret = ret.merge(:security_group_set => security_group_set)
    else
      raise ErrorUsage.new("The target and its parent provider are both missing any security groups")
    end
  end
  # using @provider[:iaas_properties] because it has the credentials
  unless props_with_creds = @provider[:iaas_properties]
    Log.error("Unexpected that @provider[:iaas_properties] is nil")
    return ret
  end
  props_with_creds = props_with_creds.merge(target_property_hash)
  self.class.check(:ec2,props_with_creds,:properties_to_check => PropertiesToCheck)

  ret
end
-
1
PropertiesToCheck = [:subnet] #TODO: will add more properties to check
-
-
-
1
# Display normalization: when a :security_group_set array is present and
# no :security_group is set, collapses the set into a comma-separated
# :security_group string. Mutates and returns iaas_properties.
def self.modify_for_print_form!(iaas_properties)
  sg_set = iaas_properties[:security_group_set]
  iaas_properties[:security_group] ||= sg_set.join(',') if sg_set
  iaas_properties
end
-
-
1
# Strips every key not in the SanitizedProperties whitelist (dropping
# credentials and other non-displayable fields). Mutates iaas_properties
# in place; like Hash#reject!, returns nil when nothing was removed.
def self.sanitize!(iaas_properties)
  iaas_properties.reject! { |key, _value| !SanitizedProperties.include?(key) }
end
# Whitelist of property keys that may be shown to the user.
SanitizedProperties = [:region,:keypair,:security_group,:security_group_set,:subnet,:ec2_type,:availability_zone]
-
-
1
# Returns :ec2_vpc when the properties indicate a VPC target; nil for any
# other (or missing) :ec2_type value.
def self.more_specific_type?(iaas_properties)
  ec2_type = iaas_properties[:ec2_type]
  return unless ec2_type
  :ec2_vpc if ec2_type.to_sym == :ec2_vpc
end
-
-
end
-
end
-
end; end
-
1
require 'thread'
-
1
require 'timeout'
-
1
require 'net/ssh'
-
1
require 'net/scp'
-
1
require 'mcollective'
-
-
2
module DTK; class Target
-
1
class InstallAgentsHelper
-
1
# Holds the target whose unmanaged nodes will get agents installed.
def initialize(target)
  @target = target
end
# Convenience entry point: build a helper for target and run the install.
def self.install(target)
  new(target).install
end
-
1
# Installs the dtk node agent on every unmanaged (physical) node of the
# target: writes a per-node install script to a temp directory, enqueues
# one SshJob per node on the Work thread pool, and waits for completion
# (stopping workers on timeout). The temp directory is removed in all
# cases.
def install()
  # we get all the nodes that are 'unmanaged', meaning they are physical nodes that do not have a node agent installed
  unmanaged_nodes = @target.get_objs(:cols => [:unmanaged_nodes]).map{|r|r[:node]}
  servers, install_script, mcollective_client = [], nil, nil

  # TODO: better to use tempfile library; see how it is used in ../server/utils/internal/command_and_control/adapters/node_config/mcollective/config.rb
  install_script_file_path = "#{R8.app_user_home()}/install_script"
  FileUtils.mkdir(install_script_file_path) unless File.directory?(install_script_file_path)

  # create mcollective-client instance
  # not using our custom mcollective client because discover is not working properly with it
  mcollective_client = ::MCollective::Client.new('/etc/mcollective/client.cfg')
  mcollective_client.options = {}

  # here we set information we need to connect to nodes via ssh
  unmanaged_nodes.each do |node|
    node.update_object!(:ref)

    install_script = CommandAndControl.install_script(node)
    install_script_file_name = "install_script_#{node[:id]}"

    servers << {
      "dtk_node_agent_location" => "#{R8.app_user_home()}/dtk-node-agent",
      "install_script_file_path" => install_script_file_path,
      "install_script_file_name" => install_script_file_name,
      "node" => node,
      "mcollective_client" => mcollective_client
    }

    File.open("#{install_script_file_path}/#{install_script_file_name}", 'w') do |f|
      f.puts(install_script)
    end
  end

  # add jobs to the queue
  servers.each do |server|
    Work.enqueue(SshJob, server)
  end

  # start the workers
  Work.start
  # wait for all jobs to finish
  begin
    Work.drain
  rescue Timeout::Error => e
    # stop the workers
    Work.stop
  ensure
    FileUtils.rm_rf(install_script_file_path)
  end
end
-
-
# we use this module to handle multithreading, and if some node is not reachable or some error happens on the node
-
# we just ignore it
-
1
# Minimal thread pool used to install agents on many nodes in parallel.
# Jobs (worker class + params) are queued with enqueue, processed by
# @n_threads workers after start, and awaited with drain; per-node errors
# are deliberately ignored (best-effort installation).
module Work
  @queue = Queue.new
  # BUGFIX: was `R8::Config[...][:threads].to_i||10` — to_i never returns
  # nil, so the `||10` default was dead and a missing config value coerced
  # to 0 threads. Apply the default before coercing.
  @n_threads = (R8::Config[:workflow][:install_agents][:threads] || 10).to_i
  @workers = []
  @running = true # NOTE(review): written by stop() but never read; kept as-is
  @servers_per_thread = 0

  Job = Struct.new(:worker, :params)

  module_function

  # Adds a job to the queue; params are forwarded to worker.new.call.
  def enqueue(worker, *params)
    @queue << Job.new(worker, params)
  end

  # Spins up the worker threads; each attempts to process its share of
  # the queue, then exits.
  def start
    @servers_per_thread = (@queue.size/@n_threads) + 1
    @n_threads.times do
      @workers << Thread.new do
        begin
          @servers_per_thread.times.map {process_jobs}
        ensure
          Thread.current.exit
        end
      end
    end
  end

  # Pops jobs until the queue is empty; each job instantiates its worker
  # class and invokes #call with the queued params.
  def process_jobs
    while !@queue.empty?
      job = nil
      job = @queue.pop
      job.worker.new.call(*job.params)
    end
  end

  # Blocks (polling once a second) until all workers finish, bounded by a
  # configurable timeout; raises Timeout::Error when exceeded.
  def drain
    # BUGFIX: same dead-default pattern as @n_threads (`.to_i||600`).
    t_out = (R8::Config[:workflow][:install_agents][:timeout] || 600).to_i
    Timeout.timeout(t_out) do
      loop do
        break unless @workers.any?{|w| w.alive?}
        sleep 1
      end
    end
  end

  # Kills any workers currently sleeping (used after a drain timeout).
  def stop
    @running = false
    @workers.each do |t|
      t.exit() if t.status.eql?('sleep')
    end
  end
end
-
-
# this is the job that will upload node agent to physical nodes using Net::SCP.upload! command
-
# and after that we execute some commands on the node itself using execute_ssh_command() method
-
1
# Worker job that provisions one physical node over SSH: uploads the
# dtk-node-agent and the generated install script with Net::SCP, runs
# them remotely, then verifies the agent via an mcollective discover call
# and flips the node's :managed flag on success.
class SshJob
  # message - hash built by InstallAgentsHelper#install with string keys:
  #           "node", "mcollective_client", "dtk_node_agent_location",
  #           "install_script_file_path", "install_script_file_name".
  # Raises ErrorUsage when the node's external_ref lacks the routable host
  # address or ssh credentials. Connection failures are logged and the job
  # returns without raising (best-effort).
  def call(message)
    Log.info_pp(['SshJob#call',:message,message[:node]])
    node = message["node"]
    mcollective_client = message["mcollective_client"]
    external_ref = node.get_external_ref()

    unless hostname = external_ref[:routable_host_address]
      raise ErrorUsage.new("#{name_and_id(node)} is missing routable_host_address")
    end
    unless ssh_credentials = external_ref[:ssh_credentials]
      raise ErrorUsage.new("#{name_and_id(node)} is missing ssh_credentials")
    end
    [:ssh_user,:ssh_password].each do |ssh_attr|
      unless ssh_credentials[ssh_attr]
        raise ErrorUsage.new("#{name_and_id(node)} is missing ssh_credentials field #{ssh_attr}")
      end
    end

    params = {
      :hostname => external_ref[:routable_host_address],
      :user => ssh_credentials[:ssh_user],
      :password => ssh_credentials[:ssh_password],
      :port => ssh_credentials[:port]||"22",
      :id => node.id()
    }

    # just to test that we can connect
    begin
      execute_ssh_command("ls /", params)
    rescue Exception => e
      # NOTE(review): rescuing Exception (not StandardError) is deliberate
      # best-effort here, but it also swallows SignalException/SystemExit
      Log.info_pp(['SshJob#call',:error,e, :params, params])
      return
    end

    execute_ssh_command("rm -rf /tmp/dtk-node-agent", params)

    Net::SCP.upload!(params[:hostname], params[:user],
      "#{message["install_script_file_path"]}/#{message["install_script_file_name"]}", "/tmp",
      :ssh => { :password => params[:password], :port => params[:port] }, :recursive => true)

    Net::SCP.upload!(params[:hostname], params[:user],
      message["dtk_node_agent_location"], "/tmp",
      :ssh => { :password => params[:password], :port => params[:port] }, :recursive => true)

    # perform installation (sudo when not connecting as root)
    install_command = params[:user].eql?('root') ? "bash /tmp/dtk-node-agent/install_agent.sh" : "sudo bash /tmp/dtk-node-agent/install_agent.sh"
    execute_ssh_command(install_command, params)
    execute_ssh_command("rm -rf /tmp/dtk-node-agent", params)

    install_script_command = params[:user].eql?('root') ? "bash /tmp/#{message['install_script_file_name']}" : "sudo bash /tmp/#{message['install_script_file_name']}"
    execute_ssh_command(install_script_command, params)
    execute_ssh_command("rm -rf /tmp/#{message['install_script_file_name']}", params)

    # sleep set to 2 seconds to be sure that mcollective on node is ready to listen for discovery
    sleep(2)

    # send discover call filtered by 'pbuilderid'(node[:ref] == pbuilderid)
    # if empty array is returned, agent on node is not working as expected
    filter = {"fact"=>[{:fact=>"pbuilderid",:value=>node[:ref],:operator=>"=="}], "cf_class"=>[], "agent"=>[], "identity"=>[], "compound"=>[]}
    discovered_data = CommandAndControl.discover(filter, 3, 1, mcollective_client)

    # set managed = true only if mcollective from node returns valid response
    if discovered_data.is_a?(Array)
      node.update(:managed => true) unless discovered_data.empty?
    else
      # NOTE(review): if discovered_data is nil this condition calls
      # .payload on nil and raises; the intended logic looks off — confirm
      node.update(:managed => true) unless (discovered_data.nil? && discovered_data.payload.nil?)
    end
  end

  private
  # Pretty "Name (id)" form of the node, capitalized for error messages.
  def name_and_id(node)
    node.pp_name_and_id(:capitalize=>true)
  end

  # Runs a single command over SSH, echoing all remote stdout/stderr lines
  # to this process's stdout prefixed with the hostname.
  def execute_ssh_command(command, params={})
    Net::SSH.start(params[:hostname], params[:user], :password => params[:password], :port => params[:port]) do |ssh|
      # capture all stderr and stdout output from a remote process
      ssh.exec!(command) do |channel, stream, line|
        puts "#{params[:hostname]} > #{line}"
      end
    end
  end
end
-
end
-
end; end
-
1
module DTK
-
1
class Target
-
1
class Instance < self
-
1
r8_nested_require('instance','default_target')
-
-
1
subclass_model :target_instance, :target, :print_form => 'target'
-
-
1
# Returns a display-ready info hash for this target instance: the fields
# of OrderedInfoKeys in that order, omitting nils; the provider's display
# name is flattened into :provider_name and iaas_properties are sanitized
# for printing.
def info()
  target = get_obj(:cols => [:display_name,:iaas_type,:iaas_properties,:is_default_target,:provider])
  IAASProperties.sanitize_and_modify_for_print_form!(target[:iaas_type],target[:iaas_properties])
  if provider_name = (target[:provider]||{})[:display_name]
    target[:provider_name] = provider_name
  end
  OrderedInfoKeys.inject(Hash.new) do |h,k|
    val = target[k]
    val.nil? ? h : h.merge(k => val)
  end
end
-
1
OrderedInfoKeys = [:display_name,:id,:provider_name,:iaas_properties,:is_default_target]
-
-
1
# Wraps this instance in an IAASProperties accessor object.
def iaas_properties()
  IAASProperties.new(:target_instance => self)
end

# Nodes currently running in this target (resolved via target refs).
def get_target_running_nodes()
  Node::TargetRef.get_target_running_nodes(self)
end
-
-
# These properties are inherited ones for target instance: default provider -> target's provider -> target instance (most specific)
-
1
InheritedProperties = [:iaas_type,:iaas_properties,:type,:description]
-
-
1
# Creates an ec2 target instance (plus any availability-zone targets the
# ec2 type requires) under provider. Requires property_hash[:region];
# raises ErrorUsage when missing or when a same-named target exists.
# Returns the first created target instance.
def self.create_target_ec2(project_idh,provider,ec2_type,property_hash,opts={})
  unless region = property_hash[:region]
    raise ErrorUsage.new("Region is required for target created in '#{provider.get_field?(:iaas_type)}' provider type!")
  end

  target_name = opts[:target_name]|| provider.default_target_name(:region => region)

  # proactively getting needed columns on provider
  provider.update_obj!(*InheritedProperties)

  # raises errors if problems with any params
  iaas_properties_array = IAASProperties::Ec2.check_and_compute_needed_iaas_properties(target_name,ec2_type,provider,property_hash)

  create_targets?(project_idh,provider,iaas_properties_array,:raise_error_if_exists=>true).first
end
-
-
1
# Idempotently creates target-instance rows under provider, one per entry
# in iaas_properties_array. Rows whose (parent_id, display_name) already
# exist are skipped — or, with opts[:raise_error_if_exists], trigger an
# ErrorUsage. Returns the created target-instance objects ([] when there
# is nothing to create).
def self.create_targets?(project_idh,provider,iaas_properties_array,opts={})
  ret = Array.new
  target_mh = project_idh.createMH(:target)
  provider.update_obj!(*InheritedProperties)
  provider_id = provider.id
  create_rows = iaas_properties_array.map do |iaas_properties|
    display_name = iaas_properties.name
    ref = display_name.downcase.gsub(/ /,"-")
    specific_params = {
      :parent_id => provider_id,
      :ref => ref,
      :display_name => display_name,
      :type => 'instance'
    }

    el = provider.hash_subset(:iaas_type,:type,:description).merge(specific_params)

    # need deep merge for iaas_properties
    el.merge(:iaas_properties => iaas_properties.properties)
  end

  # check if there are any matching target instances that are created already
  disjunct_array = create_rows.map do |r|
    [:and, [:eq, :parent_id, r[:parent_id]],
     [:eq, :display_name, r[:display_name]]]
  end
  sp_hash = {
    :cols => [:id,:display_name,:parent_id],
    :filter => [:or] + disjunct_array
  }
  existing_targets = get_these_objs(target_mh,sp_hash)
  unless existing_targets.empty?
    if opts[:raise_error_if_exists]
      existing_names = existing_targets.map{|et|et[:display_name]}.join(',')
      obj_type = pp_object_type(existing_targets.size)
      raise ErrorUsage.new("The #{obj_type} (#{existing_names}) exist(s) already")
    else
      # drop rows that match an existing target
      create_rows.reject! do |r|
        parent_id = r[:parent_id]
        name = r[:display_name]
        existing_targets.find{|et|et[:parent_id] == parent_id and et[:display_name] == name}
      end
    end
  end

  return ret if create_rows.empty?
  create_opts = {:convert => true, :ret_obj => {:model_name => :target_instance}}
  create_from_rows(target_mh,create_rows,create_opts)
end
-
-
1
# Accumulates user-facing info about side effects of deleting a target
# (e.g. the default or workspace target being re-pointed elsewhere).
class DeleteResponseObject
  # target - the target being deleted; its display name is captured for
  #          the generated messages.
  def initialize(target)
    @target_name = target.get_field?(:display_name)
    @info = Hash.new
  end
  # Records that the default target was re-pointed to new_default_target.
  def add_info_changed_default_target!(new_default_target)
    @info[:changed_default_target] = new_default_target
  end
  # Records that the workspace target was re-pointed to new_default_target.
  def add_info_changed_workspace_target!(new_default_target)
    @info[:changed_workspace_target] = new_default_target
  end

  # Returns {} when nothing was re-pointed; otherwise {:info => [msg,...]}.
  # When default and workspace were both moved to the same target, a single
  # combined message is produced.
  def hash_form()
    ret = Hash.new
    return ret if @info.empty?()
    default_target = @info[:changed_default_target]
    workspace_target = @info[:changed_workspace_target]
    if default_target and workspace_target and default_target.id == workspace_target.id
      add_changed_target!(ret,default_target,:default_and_workspace)
    else
      add_changed_target!(ret,default_target,:default) if default_target
      add_changed_target!(ret,workspace_target,:workspace) if workspace_target
    end
    ret
  end
  private
  # Appends the human-readable message for one re-pointed role to ret[:info].
  def add_changed_target!(ret,new_target,role)
    new_target_name = new_target.get_field?(:display_name)
    # BUGFIX: compared against :default_and_target, a symbol never passed
    # (hash_form passes :default_and_workspace), so the plural wording was
    # unreachable.
    this_setting = (role == :default_and_workspace ? 'these target settings' : 'this target setting')
    role_str = role.to_s.gsub(/_/,' ')
    msg = "Deleted '#{@target_name}' that was #{role_str} target; changed #{this_setting} to '#{new_target_name}'"
    (ret[:info] ||= Array.new) << msg
    ret
  end
end
-
-
# returns hash that has response info
-
1
# Deletes target and everything under it inside a transaction: re-points
# the default target (and any workspace pointing at it) to the builtin
# target, purges workspace assemblies, deletes other assemblies with
# their nodes, then deletes the target itself. The builtin target cannot
# be deleted. Returns a response-info hash from DeleteResponseObject.
def self.delete_and_destroy(target)
  response_obj = DeleteResponseObject.new(target)
  if target.is_builtin_target?()
    raise ErrorUsage.new("Cannot delete the builtin target")
  end

  target_mh = target.model_handle()
  builtin_target = get_builtin_target(target_mh)
  current_default_target = DefaultTarget.get(target_mh)

  Transaction do
    # change default target if pointing to this target
    if current_default_target and current_default_target.id == target.id
      response_obj.add_info_changed_default_target!(builtin_target)
      DefaultTarget.set(builtin_target,:current_default_target => current_default_target,:update_workspace_target => false)
    end

    assemblies = Assembly::Instance.get(target.model_handle(:assembly_instance),:target_idh => target.id_handle())
    assemblies.each do |assembly|
      if workspace = Workspace.workspace?(assembly)
        # modify workspace target if it points to the one being deleted
        if current_workspace_target = workspace.get_target()
          if current_workspace_target.id == target.id
            response_obj.add_info_changed_workspace_target!(builtin_target)
            workspace.set_target(builtin_target, :mode => :from_delete_target)
          end
        end

        workspace.purge(:destroy_nodes => true)
      else
        Assembly::Instance.delete(assembly.id_handle,:destroy_nodes => true)
      end
    end
    delete_instance(target.id_handle())
  end
  response_obj.hash_form()
end
-
-
1
# Makes target the default target (via DefaultTarget.set) and returns a
# ResponseInfo message naming old and new defaults.
def self.set_default_target(target,opts={})
  current_default_target = DefaultTarget.set(target,opts)
  ResponseInfo.info("Default target changed from ?current_default_target to ?new_default_target",
    :current_default_target => current_default_target,
    :new_default_target => target)
end
-
-
-
1
# Fetches the current default target (optionally with specific columns);
# delegates to DefaultTarget.get.
def self.get_default_target(target_mh,cols=[])
  DefaultTarget.get(target_mh,cols)
end
-
-
1
# Merges iaas_properties (string or symbol keys) into the target's stored
# iaas_properties and persists the result. Setting :security_group removes
# any stored :security_group_set and vice versa, so the two never coexist.
def self.set_properties(target,iaas_properties)
  target.update_obj!(:iaas_properties)
  current_properties = target[:iaas_properties]

  # convert string keys to symbols ({'keypair' => 'default'} to {:keypair => 'default'})
  iaas_properties = iaas_properties.inject({}){|memo,(k,v)| memo[k.to_sym] = v; memo}

  # avoid having security_group and security_group_set in one iaas_properties
  if iaas_properties[:security_group_set] || iaas_properties[:security_group]
    current_properties.delete(iaas_properties[:security_group] ? :security_group_set : :security_group)
  end

  hash_assignments = {:iaas_properties => current_properties.merge(iaas_properties)}
  Model.update_from_hash_assignments(target.id_handle(),hash_assignments)
end
-
-
1
# Lists target instances (rows with type != 'template', optionally ANDed
# with opts[:filter]), decorating builtin targets with the builtin
# provider fields, sanitizing iaas properties for display, and refining
# the provider's iaas_type where possible. Sorted: default target first,
# then by iaas_type, then display_name.
def self.list(target_mh,opts={})
  filter = [:neq,:type,'template']
  if opts[:filter]
    filter = [:and,filter,opts[:filter]]
  end
  sp_hash = {
    :cols => [:id, :display_name, :iaas_type, :type, :parent_id, :iaas_properties, :provider, :is_default_target],
    :filter => filter
  }
  unsorted_rows = get_these_objs(target_mh, sp_hash)
  unsorted_rows.each do |t|
    if t.is_builtin_target?()
      set_builtin_provider_display_fields!(t)
    end
    IAASProperties.sanitize_and_modify_for_print_form!(t[:iaas_type],t[:iaas_properties])
    if provider = t[:provider]
      IAASProperties.sanitize_and_modify_for_print_form!(provider[:iaas_type],provider[:iaas_properties])
      # modifies iaas_type to make it more specific
      if specific_iaas_type = IAASProperties.more_specific_type?(t[:iaas_type],t[:iaas_properties])
        provider[:iaas_type] = specific_iaas_type
      end
    end
  end
  # sort by 1-whether default, 2-iaas_type, 3-display_name
  unsorted_rows.sort do |a,b|
    [a[:is_default_target] ? 0 : 1, a[:iaas_type], a[:display_name]] <=>
    [b[:is_default_target] ? 0 : 1, b[:iaas_type], b[:display_name]]
  end
end
-
-
1
# Marker used when rendering the default target in listings.
DefaultTargetMark = '*'

# A builtin target has no parent provider (:parent_id is nil).
def is_builtin_target?()
  get_field?(:parent_id).nil?
end

# Creates nodes under target from externally discovered inventory data.
def self.import_nodes(target, inventory_data)
  Node::TargetRef.create_nodes_from_inventory_data(target, inventory_data)
end
-
-
1
private
-
1
# Finds the single builtin target (no parent, type 'staging'); logs an
# error and returns nil if exactly one row is not found.
def self.get_builtin_target(target_mh)
  sp_hash = {
    :cols => [:id,:group_id,:display_name],
    :filter => [:and,[:eq,:parent_id,nil],[:eq,:type,'staging']]
  }
  rows = get_objs(target_mh,sp_hash)
  unless rows.size == 1
    Log.error("Unexpected that get_builtin_target returned '#{rows.size.to_s}' rows")
    return nil
  end
  rows.first
end
-
-
# TODO: right now type can be different values for instance; may cleanup so its set to 'instance'
# Filter matching all non-template (i.e. instance) targets.
def self.object_type_filter()
  [:neq,:type,'template']
end

# Default display name for a target instance: "<provider-base>-<region>".
def self.display_name_from_provider_and_region(provider,region)
  "#{provider.base_name()}-#{region}"
end

# Decorates a builtin target row with the canonical builtin provider info.
# NOTE(review): merges in a reference to the shared, unfrozen constant
# below — mutating the row's :provider would mutate it for all rows.
def self.set_builtin_provider_display_fields!(target)
  target.merge!(:provider => BuiltinProviderDisplayHash)
end

BuiltinProviderDisplayHash = {:iaas_type=>'ec2', :display_name=>'DTK-BUILTIN'}
-
end
-
end
-
end
-
2
module DTK; class Target
-
1
class Instance
-
1
module DefaultTarget
-
1
# Returns the target instance flagged :is_default_target (as a
# target_instance subclass object), or nil when none is set.
# cols - columns to fetch; defaults to [:id,:display_name,:group_id].
def self.get(target_mh,cols=[])
  cols = [:id,:display_name,:group_id] if cols.empty?
  sp_hash = {
    :cols => cols,
    :filter => [:eq,:is_default_target,true]
  }
  ret = Target::Instance.get_obj(target_mh,sp_hash)
  ret && ret.create_subclass_obj(:target_instance)
end
-
-
# returns current_default_target
-
# opts can be
-
# :current_default_target (computed already)
-
# :update_workspace_target
-
1
def self.set(target,opts={})
-
ret = current_default_target = opts[:current_default_target] || get(target.model_handle(),[:display_name])
-
return ret unless target
-
-
if current_default_target && (current_default_target.id == target.id)
-
raise ErrorUsage::Warning.new("Default target is already set to #{current_default_target[:display_name]}")
-
end
-
-
Model.Transaction do
-
current_default_target.update(:is_default_target => false) if current_default_target
-
target.update(:is_default_target => true)
-
if opts[:update_workspace_target]
-
# also set the workspace with this target
-
Workspace.set_target(target,:mode => :from_set_default_target)
-
end
-
end
-
ret
-
end
-
end
-
end
-
end; end
-
1
module DTK
-
# This is a provider
-
1
class Target
-
1
class Template < self
-
1
subclass_model :target_template, :target, :print_form => 'provider'
-
-
1
def self.list(target_mh)
-
sp_hash = {
-
:cols => common_columns(),
-
:filter => object_type_filter()
-
}
-
get_these_objs(target_mh, sp_hash)
-
end
-
-
1
def self.create_provider?(project_idh, iaas_type, provider_name, iaas_properties_hash, params_hash={}, opts={})
-
if existing_provider = provider_exists?(project_idh, provider_name)
-
if opts[:raise_error_if_exists]
-
raise ErrorUsage.new("Provider (#{provider_name}) exists already")
-
else
-
return existing_provider
-
end
-
end
-
-
iaas_properties = IAASProperties.check(iaas_type,iaas_properties_hash)
-
-
target_mh = project_idh.createMH(:target)
-
display_name = provider_display_name(provider_name)
-
ref = display_name.downcase.gsub(/ /,"-")
-
create_row = {
-
:iaas_type => iaas_type.to_s,
-
:project_id => project_idh.get_id(),
-
:type => 'template',
-
:ref => ref,
-
:display_name => display_name,
-
:description => params_hash[:description],
-
:iaas_properties => iaas_properties
-
}
-
create_opts = {:convert => true, :ret_obj => {:model_name => :target_template}}
-
create_from_row(target_mh,create_row,create_opts)
-
end
-
-
1
# Aggregates per-message-type arrays of messages produced while deleting a
# provider's target instances.
class DeleteResponse < Hash
  # Folds one target's response hash (msg_type => [messages, ...]) into this
  # aggregate; messages are appended in order. Returns self for chaining.
  def add_target_response(hash)
    hash.each_pair do |msg_type, messages|
      bucket = (self[msg_type] ||= Array.new)
      messages.each { |message| bucket << message }
    end
    self
  end
end
-
1
def self.delete_and_destroy(provider,opts={})
-
response = DeleteResponse.new()
-
unless opts[:force]
-
assembly_instances = provider.get_assembly_instances(:omit_empty_workspace => true)
-
unless assembly_instances.empty?
-
assembly_names = assembly_instances.map{|a|a[:display_name]}.join(',')
-
provider_name = provider.get_field?(:display_name)
-
raise ErrorUsage.new("Cannot delete provider '#{provider_name}' because service instance(s) (#{assembly_names}) are using one of its targets")
-
end
-
end
-
-
target_instances = provider.get_target_instances(:cols => [:display_name,:is_default_target])
-
Transaction do
-
target_instances.each do |target_instance|
-
target_delete_response = Instance.delete_and_destroy(target_instance)
-
response.add_target_response(target_delete_response)
-
end
-
delete_instance(provider.id_handle())
-
end
-
response
-
end
-
-
1
# Creates (if missing) one target instance per EC2 region under this provider.
# region_or_regions may be a String (one region), an Array of region strings,
# or nil, in which case the configured R8::Config[:ec2][:regions] list is used.
def create_bootstrap_targets?(project_idh, region_or_regions=nil)
  # for succinctness
  r = region_or_regions
  regions =
    if r.kind_of?(Array) then r
    elsif r.kind_of?(String) then [r]
    else R8::Config[:ec2][:regions]
    end

  # DTK-1735: DO NOT copy aws key and secret from provider to target.
  # BUG FIX: use non-destructive reject rather than delete_if so the hash held
  # in this provider object's :iaas_properties field is not mutated in place.
  common_iaas_properties = get_field?(:iaas_properties).reject { |k, v| [:key, :secret].include?(k) }

  iaas_properties_list = regions.map do |region|
    name = default_target_name(:region => region)
    properties = common_iaas_properties.merge(:region => region)
    IAASProperties.new(:name => name, :iaas_properties => properties)
  end
  Instance.create_targets?(project_idh, self, iaas_properties_list)
end
-
-
1
def get_availability_zones(region)
-
CommandAndControl.get_and_process_availability_zones(get_field?(:iaas_type), get_field?(:iaas_properties).merge(:region => region),region)
-
end
-
-
-
1
def get_assembly_instances(opts={})
-
ret = Array.new
-
target_instances = id_handle.create_object().get_target_instances()
-
unless target_instances.empty?
-
ret = Assembly::Instance.get(model_handle(:assembly_instance),:target_idhs => target_instances.map{|t|t.id_handle})
-
if opts[:omit_empty_workspace]
-
ret.reject! do |assembly|
-
if Workspace.is_workspace?(assembly)
-
assembly.get_nodes().empty?
-
end
-
end
-
end
-
end
-
ret
-
end
-
-
1
def get_target_instances(opts={})
-
sp_hash = {
-
:cols => add_default_cols?(opts[:cols]),
-
:filter => [:eq,:parent_id,id()]
-
}
-
Target::Instance.get_objs(model_handle(:target_instance),sp_hash)
-
end
-
-
# TODO: move to be processed by IAAS specfic
-
1
def default_target_name(hash_params)
-
if Aux.has_just_these_keys?(hash_params,[:region])
-
"#{base_name()}-#{hash_params[:region]}"
-
else
-
raise Error.new("Not implemented when hash_parsm keys are: #{hash_params.keys.join(',')}")
-
end
-
end
-
1
private
-
1
def base_name()
-
# get_field?(:display_name).gsub(Regexp.new("#{DisplayNameSufix}$"),'')
-
get_field?(:display_name)
-
end
-
-
1
# Filter selecting provider rows: targets of type 'template' represent
# providers (see Template subclass_model declaration above).
def self.object_type_filter()
  [:eq, :type, 'template']
end
-
-
1
def self.provider_display_name(provider_name)
-
# "#{provider_name}#{DisplayNameSufix}"
-
provider_name
-
end
-
# removed '-template' from provider display_name (ticket DTK-1480)
-
# DisplayNameSufix = '-template'
-
-
1
def self.provider_exists?(project_idh,provider_name)
-
sp_hash = {
-
:cols => [:id],
-
:filter => [:and,[:eq,:display_name,provider_display_name(provider_name)],
-
[:eq,:project_id,project_idh.get_id()]]
-
}
-
get_obj(project_idh.createMH(:target_template),sp_hash)
-
end
-
-
end
-
end
-
end
-
1
module DTK
-
1
class Task < Model
-
1
r8_nested_require('task','create')
-
1
r8_nested_require('task','status')
-
1
r8_nested_require('task','action')
-
1
r8_nested_require('task','template')
-
1
r8_nested_require('task','stage')
-
1
r8_nested_require('task','node_group_processing')
-
1
r8_nested_require('task','action_results')
-
1
r8_nested_require('task','qualified_index')
-
-
1
extend CreateClassMixin
-
1
include StatusMixin
-
1
include NodeGroupProcessingMixin
-
1
include Status::TableForm::Mixin
-
1
include ActionResults::Mixin
-
-
1
def self.common_columns()
-
[
-
:id,
-
:display_name,
-
:group_id,
-
:status,
-
:result,
-
:updated_at,
-
:created_at,
-
:started_at,
-
:ended_at,
-
:task_id,
-
:temporal_order,
-
:position,
-
:executable_action_type,
-
:executable_action,
-
:commit_message,
-
:assembly_id,
-
:target_id
-
]
-
end
-
-
# can be :sequential, :concurrent, :executable_action, or :decomposed_node_group
-
1
# can be :sequential, :concurrent, :executable_action, or :decomposed_node_group
# (returns nil if none of those apply)
def basic_type()
  exec_action = self[:executable_action]
  if exec_action
    exec_action[:decomposed_node_group] ? :decomposed_node_group : :executable_action
  else
    case self[:temporal_order]
    when "sequential" then :sequential
    when "concurrent" then :concurrent
    end
  end
end
-
-
# can be :sequential, :concurrent, or :leaf
-
1
# can be :sequential, :concurrent, or :leaf
def temporal_type()
  bt = basic_type()
  if bt == :decomposed_node_group || bt == :concurrent
    :concurrent
  elsif bt == :sequential
    :sequential
  else
    :leaf
  end
end
-
-
# returns list (possibly empty) of subtask idhs that guard this
-
1
# Returns a list (possibly empty) of subtask id-handles that guard this task,
# derived from the external guard entries whose guarded node matches this
# task's executable-action node.
def guarded_by(external_guards)
  ret = Array.new
  ea = executable_action()
  # Non-node actions may not respond to node_id; no node means no guards.
  return ret unless node_id = ea.respond_to?(:node_id) && ea.node_id
  # BUG FIX: the original select block only tested that the guarded node had a
  # truthy :id (always true) and never used node_id; match against this node.
  task_ids = external_guards.select { |g| g[:guarded][:node][:id] == node_id }.map { |g| g[:guard][:task_id] }.uniq
  task_ids.map { |task_id| id_handle(:id => task_id) }
end
-
-
1
def assembly()
-
if assembly_id = get_field?(:assembly_id)
-
id_handle(:model_name => :assembly,:id => assembly_id).create_object()
-
end
-
end
-
-
1
def get_errors()
-
sp_hash = {
-
:cols => [:content]
-
}
-
get_children_objs(:task_error,sp_hash).map{|r|r[:content]}
-
end
-
-
# indexed by task ids
-
1
def get_ndx_errors()
-
self.class.get_ndx_errors(hier_task_idhs())
-
end
-
1
# Returns a hash mapping task id -> array of error contents for the given
# task id-handles.
def self.get_ndx_errors(task_idhs)
  ret = Hash.new
  # BUG FIX: the empty-input path returned an Array while the normal path
  # returns a Hash; callers index the result by task id, so always return a Hash.
  return ret if task_idhs.empty?
  sp_hash = {
    :cols => [:task_id, :content],
    :filter => [:oneof, :task_id, task_idhs.map { |idh| idh.get_id() }]
  }
  task_error_mh = task_idhs.first.createMH(:task_error)
  Model.get_objs(task_error_mh, sp_hash).each do |r|
    task_id = r[:task_id]
    ret[task_id] = (ret[task_id] || Array.new) + [r[:content]]
  end
  ret
end
-
-
1
# Returns this task's events (child :task_event rows) ordered by creation time.
def get_events()
  query = {:cols => [:created_at, :type, :content]}
  get_children_objs(:task_event, query).sort_by { |event| event[:created_at] }
end
-
-
1
# Returns a hash mapping task id -> array of log content hashes (each
# annotated with :label and :task_name), ordered by creation time.
def get_logs()
  ret_logs = Hash.new
  # BUG FIX: include :created_at in the selected columns; rows are sorted on
  # it below but the original query omitted it (all keys nil => no ordering).
  sp_hash = {:cols => [:task_id, :display_name, :content, :parent_task, :created_at]}
  rows = get_children_objs(:task_log, sp_hash).sort { |a, b| a[:created_at] <=> b[:created_at] }

  rows.each do |r|
    task_id = r[:task_id]
    content = r[:content] || Hash.new
    content.merge!({:label => r[:display_name], :task_name => r[:task][:display_name]})
    ret_logs[task_id] = (ret_logs[task_id] || Array.new) + [content]
  end

  ret_logs
end
-
-
1
def get_ndx_logs()
-
self.class.get_ndx_logs(hier_task_idhs())
-
end
-
1
# Returns a hash mapping task id -> array of log content hashes (each
# annotated with :label and :task_name) for the given task id-handles.
def self.get_ndx_logs(task_idhs)
  ret = Hash.new
  # Consistent return type: empty Hash rather than the original empty Array.
  return ret if task_idhs.empty?
  sp_hash = {
    :cols => [:task_id, :content, :display_name, :parent_task],
    :filter => [:oneof, :task_id, task_idhs.map { |idh| idh.get_id }]
  }
  task_log_mh = task_idhs.first.createMH(:task_log)
  Model.get_objs(task_log_mh, sp_hash).each do |r|
    task_id = r[:task_id]
    # BUG FIX: guard against nil :content (the sibling get_logs already does
    # this); calling merge! on nil would raise NoMethodError.
    content = r[:content] || Hash.new
    content.merge!({:label => r[:display_name], :task_name => r[:task][:display_name]})
    ret[task_id] = (ret[task_id] || Array.new) + [content]
  end
  ret
end
-
-
1
def add_event(event_type,result=nil)
-
if event = TaskEvent.create_event?(event_type,self,result)
-
type = event.delete(:type)||event_type
-
row = {
-
:content => event.to_hash,
-
:ref => "task_event",
-
:type => type.to_s,
-
:task_id => id()
-
}
-
Model.create_from_rows(child_model_handle(:task_event),[row],{:convert => true})
-
event
-
end
-
end
-
-
# returns [event,error-array]
-
1
def add_event_and_errors(event_type,error_source,errors_in_result)
-
ret = [nil,nil]
-
# process errors and strip out from what is passed to add event
-
normalized_errors =
-
if error_source == :config_agent
-
config_agent = get_config_agent()
-
components = component_actions().map{|a|a[:component]}
-
errors_in_result.map{|err|config_agent.interpret_error(err,components)}
-
else
-
# TODO: stub
-
errors_in_result
-
end
-
errors = add_errors(normalized_errors)
-
# TODO: want to remove calls in function below from needing to know result format
-
event = add_event(event_type,{:data => {:errors => errors_in_result}})
-
[event,errors]
-
end
-
-
1
# Returns truthy if this task, or any direct subtask, has the given status.
def is_status?(status)
  # BUG FIX: use the subtasks() accessor (defaults to []) so a task without a
  # :subtasks key does not raise NoMethodError on nil.
  self[:status] == status || subtasks().find { |subtask| subtask[:status] == status }
end
-
-
1
# Persists the given normalized errors as child :task_error rows of this task.
# Returns the errors, or nil when there is nothing to record.
def add_errors(normalized_errors)
  return nil if normalized_errors.nil? || normalized_errors.empty?
  rows = normalized_errors.map do |err|
    {
      :content => err,
      :ref => "task_error",
      :task_id => id()
    }
  end
  Model.create_from_rows(child_model_handle(:task_error), rows, {:convert => true})
  normalized_errors
end
-
-
1
# Marks this task, its direct subtasks, and their immediate children as
# completed with the given status and result.
def update_task_subtask_status(status, result)
  # BUG FIX: use subtasks()/|| [] so tasks lacking a :subtasks key do not
  # raise NoMethodError on nil during iteration.
  subtasks().each do |subtask|
    (subtask[:subtasks] || []).each do |child_subtask|
      child_subtask.update_at_task_completion(status, result)
    end
    subtask.update_at_task_completion(status, result)
  end
  self.update_at_task_completion(status, result)
end
-
-
1
def update_at_task_completion(status,result)
-
update_hash = {
-
:status => status,
-
:result => result,
-
:ended_at => Aux::now_time_stamp()
-
}
-
update(update_hash)
-
end
-
1
def update_at_task_start(opts={})
-
update(:status => "executing", :started_at => Aux::now_time_stamp())
-
end
-
-
1
def update_when_failed_preconditions(failed_antecedent_tasks)
-
ts = Aux::now_time_stamp()
-
update(:status => "preconditions_failed", :started_at => ts, :ended_at => ts)
-
# TODO: put in context about failure in errors
-
end
-
-
# TODO: update and update_parents can be cleaned up because halfway between update and update_object!
-
# this updates self, which is leaf node, plus all parents
-
1
def update(update_hash,opts={})
-
super(update_hash)
-
unless opts[:dont_update_parents] or (update_hash.keys & [:status,:started_at,:ended_at]).empty?
-
if task_id = update_object!(:task_id)[:task_id]
-
update_parents(update_hash.merge(:task_id => task_id))
-
end
-
end
-
end
-
-
# updates parent fields that are fn of children (:status,:started_at,:ended_at)
-
1
# Recomputes a parent task's derived fields (:status, :started_at, :ended_at,
# :children_status) after one of its children (self) changed.
# child_hash carries the child's update fields plus :task_id (the parent's id).
# Propagation continues up the chain via parent.update unless only
# :children_status changed.
def update_parents(child_hash)
  parent = id_handle.createIDH(:id => child_hash[:task_id]).create_object().update_object!(:status,:started_at,:ended_at,:children_status)
  key = id().to_s.to_sym #TODO: look at avoiding this by having translation of json not make num keys into symbols
  children_status = (parent[:children_status]||{}).merge!(key => child_hash[:status])

  parent_updates = {:children_status => children_status}
  # compute parent start time; the first child to start sets it
  unless parent[:started_at] or child_hash[:started_at].nil?
    parent_updates.merge!(:started_at => child_hash[:started_at])
  end

  # compute new parent status from the multiset of child statuses;
  # precedence: executing > failed > cancelled > all-succeeded
  subtask_status_array = children_status.values
  parent_status =
    if subtask_status_array.include?("executing") then "executing"
    elsif subtask_status_array.include?("failed") then "failed"
    elsif subtask_status_array.include?("cancelled") then "cancelled"
    elsif not subtask_status_array.find{|s|s != "succeeded"} then "succeeded" #all succeeded
    else "executing" #if reach here must be some created and some finished
    end
  unless parent_status == parent[:status]
    parent_updates.merge!(:status => parent_status)
    # compute parent end time which can only change if parent changed to "failed" or "succeeded"
    if ["failed","succeeded"].include?(parent_status) and child_hash[:ended_at]
      parent_updates.merge!(:ended_at => child_hash[:ended_at])
    end
  end

  # if only :children_status changed there is nothing for the grandparent
  # to recompute, so stop propagating upward
  dont_update_parents = (parent_updates.keys - [:children_status]).empty?
  parent.update(parent_updates, :dont_update_parents => dont_update_parents)
end
-
1
private :update_parents
-
-
1
def update_input_attributes!()
-
# updates ruby task object
-
executable_action().get_and_update_attributes!(self)
-
end
-
-
1
def add_internal_guards!(guards)
-
# updates ruby task object
-
executable_action().add_internal_guards!(guards)
-
end
-
-
1
def self.get_top_level_most_recent_task(model_handle,filter=nil)
-
# TODO: can be more efficient if do sql query with order and limit 1
-
tasks = get_top_level_tasks(model_handle,filter).sort{|a,b| b[:updated_at] <=> a[:updated_at]}
-
tasks && tasks.first
-
end
-
-
1
def self.get_top_level_tasks(model_handle,filter=nil)
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,:status,:updated_at,:executable_action_type,:commit_message],
-
:filter => [:and,[:eq,:task_id,nil], #so this is a top level task
-
filter].compact
-
}
-
get_objs(model_handle,sp_hash).reject{|k,v|k == :subtasks}
-
end
-
-
1
def self.get_most_recent_top_level_task(model_handle)
-
get_top_level_tasks(model_handle).sort{|a,b| b[:updated_at] <=> a[:updated_at]}.first
-
end
-
-
-
1
def get_associated_nodes()
-
exec_actions = Array.new
-
# if executable level then get its executable_action
-
if self.has_key?(:executable_action_type)
-
# will have an executable action so if have it already
-
if self[:executable_action_type]
-
exec_actions << get_field?(:executable_action)
-
end
-
else
-
if exec_action = get_field?(:executable_action)
-
exec_actions << exec_action.merge(:task_id => id())
-
end
-
end
-
-
# if task does not have execuatble actions then get all subtasks
-
if exec_actions.empty?
-
exec_actions = get_all_subtasks().map do |t|
-
action = t[:executable_action]
-
action && action.merge(:task_id => t.id())
-
end.compact
-
end
-
-
# get all unique nodes; looking for attribute :external_ref
-
indexed_nodes = Hash.new
-
exec_actions.each do |ea|
-
next unless node = ea[:node]
-
node_id = node[:id]
-
indexed_nodes[node_id] ||= node.merge(:task_id => ea[:task_id])
-
indexed_nodes[node_id][:external_ref] ||= node[:external_ref]
-
indexed_nodes[node_id][:config_agent_type] ||= get_config_agent_type(ea)
-
end
-
-
# need to query db if missing external_refs having instance_id
-
node_ids_missing_ext_refs = indexed_nodes.values.reject{|n|(n[:external_ref]||{})[:instance_id]}.map{|n|n[:id]}
-
unless node_ids_missing_ext_refs.empty?
-
sp_hash = {
-
:cols => [:id,:external_ref],
-
:filter => [:oneof, :id, node_ids_missing_ext_refs]
-
}
-
node_mh = model_handle.createMH(:node)
-
node_objs = Model.get_objs(node_mh,sp_hash)
-
node_objs.each{|r|indexed_nodes[r[:id]][:external_ref] = r[:external_ref]}
-
end
-
indexed_nodes.values
-
end
-
-
1
def get_config_agent_type(executable_action=nil, opts={})
-
executable_action ||= executable_action(opts)
-
executable_action.config_agent_type() if executable_action && executable_action.respond_to?('config_agent_type')
-
end
-
1
def get_config_agent()
-
ConfigAgent.load(get_config_agent_type())
-
end
-
1
private :get_config_agent
-
# private :get_config_agent_type, :get_config_agent
-
-
# recursively walks structure, but returns them in flat list
-
1
def get_all_subtasks()
-
self.class.get_all_subtasks([id_handle])
-
end
-
1
def self.get_all_subtasks(top_id_handles)
-
ret = Array.new
-
id_handles = top_id_handles
-
until id_handles.empty?
-
model_handle = id_handles.first.createMH()
-
sp_hash = {
-
:cols => Task.common_columns(),
-
:filter => [:oneof,:task_id,id_handles.map{|idh|idh.get_id}]
-
}
-
next_level_objs = get_objs(model_handle,sp_hash).reject{|k,v|k == :subtasks}
-
next_level_objs.each{|st|st.reify!()}
-
id_handles = next_level_objs.map{|obj|obj.id_handle}
-
-
ret += next_level_objs
-
end
-
ret
-
end
-
-
1
def get_all_subtasks_with_logs()
-
self.class.get_all_subtasks_with_logs([id_handle])
-
end
-
1
def self.get_all_subtasks_with_logs(top_id_handles)
-
ret = Array.new
-
id_handles = top_id_handles
-
until id_handles.empty?
-
model_handle = id_handles.first.createMH()
-
sp_hash = {
-
:cols => [:id, :display_name],
-
:filter => [:oneof, :task_id, id_handles.map{|idh|idh.get_id}]
-
}
-
next_level_objs = get_objs(model_handle,sp_hash).reject{|k,v|k == :subtasks}
-
next_level_objs.each{|st|st.reify!()}
-
id_handles = next_level_objs.map{|obj|obj.id_handle}
-
-
ret += next_level_objs
-
end
-
ret
-
end
-
-
1
def reify!()
-
self[:executable_action] &&= Action::OnNode.create_from_hash(self[:executable_action_type],self[:executable_action],id_handle)
-
end
-
-
1
def self.get_hierarchical_structure(top_task_idh)
-
sp_hash = {
-
:cols => Task.common_columns(),
-
:filter => [:eq, :id, top_task_idh.get_id()]
-
}
-
top_task = get_objs(top_task_idh.createMH(),sp_hash).first
-
return nil unless top_task
-
flat_subtask_list = top_task.get_all_subtasks()
-
ndx_task_list = {top_task.id => top_task}
-
subtask_count = Hash.new
-
subtask_indexes = Hash.new
-
flat_subtask_list.each do |t|
-
ndx_task_list[t.id] = t
-
parent_id = t[:task_id]
-
subtask_count[parent_id] = (subtask_count[parent_id]||0) +1
-
subtask_indexes[t.id] = {:parent_id => parent_id,:index => t[:position]}
-
end
-
-
subtask_qualified_indexes = QualifiedIndex.compute!(subtask_indexes,top_task)
-
-
flat_subtask_list.each do |subtask|
-
subtask[QualifiedIndex::Field] = subtask_qualified_indexes[subtask[:id]][QualifiedIndex::Field]
-
parent_id = subtask[:task_id]
-
parent = ndx_task_list[parent_id]
-
if subtask.node_group_member?()
-
subtask.set_node_group_member_executable_action!(parent)
-
end
-
(parent[:subtasks] ||= Array.new(subtask_count[parent_id]))[subtask[:position]-1] = subtask
-
end
-
top_task
-
end
-
-
1
def ret_command_and_control_adapter_info()
-
# TODO: stub
-
[:node_config, nil]
-
end
-
-
# persists to db this and its sub tasks
-
1
# Persists this task and all of its subtasks to the database.
# No-op (returns nil) if already saved, detected by the presence of an id.
# Persistence is two-phase for db efficiency: first all tasks are inserted
# without parent links, then parent/task_id/children_status are patched in.
def save!()
  # no op if saved already as detected by whether has an id
  return nil if id()
  set_positions!()
  # for db access efficiency implement into two phases: 1 - save all subtasks w/o ids, then put in ids
  unrolled_tasks = unroll_tasks()
  rows = unrolled_tasks.map do |hash_row|
    executable_action = hash_row[:executable_action]
    row = {
      :display_name => hash_row[:display_name]||"task#{hash_row[:position].to_s}",
      :ref => "task#{hash_row[:position].to_s}",
      :executable_action_type => executable_action ? Aux.demodulize(executable_action.class.to_s) : nil,
      :executable_action => executable_action
    }
    cols = [:status, :result, :action_on_failure, :position, :temporal_order, :commit_message]
    cols.each{|col|row.merge!(col => hash_row[col])}
    # foreign keys default to the SQL null id when absent
    [:assembly_id,:node_id,:target_id].each do |col|
      row[col] = hash_row[col]||SQL::ColRef.null_id
    end
    row
  end
  new_idhs = Model.create_from_rows(model_handle,rows,{:convert => true,:do_not_update_info_table => true})
  # attach the freshly-assigned ids back onto the in-memory task objects,
  # relying on create_from_rows preserving row order
  unrolled_tasks.each_with_index{|task,i|task.set_id_handle(new_idhs[i])}

  # set parent relationship and use to set task_id (subtask parent) and children_status
  par_rel_rows_for_id_info = set_and_ret_parents_and_children_status!()
  par_rel_rows_for_task = par_rel_rows_for_id_info.map{|r|{:id => r[:id], :task_id => r[:parent_id], :children_status => r[:children_status]}}

  Model.update_from_rows(model_handle,par_rel_rows_for_task) unless par_rel_rows_for_task.empty?
  IDInfoTable.update_instances(model_handle,par_rel_rows_for_id_info)
end
-
-
1
# Direct subtasks of this task; empty array when none have been added.
def subtasks()
  children = self[:subtasks]
  children.nil? ? [] : children
end
-
-
# for special tasks that have component actions
-
1
def component_actions()
-
if executable_action().kind_of?(Action::ConfigNode)
-
action = executable_action()
-
action.component_actions().map{|ca| action[:node] ? ca.merge(:node => action[:node]) : ca}
-
else
-
subtasks.map{|obj|obj.component_actions()}.flatten
-
end
-
end
-
-
1
def node_level_actions()
-
if executable_action().kind_of?(Action::NodeLevel)
-
action = executable_action()
-
return action.component_actions().map{|ca| action[:node] ? ca.merge(:node => action[:node]) : ca}
-
else
-
subtasks.map{|obj|obj.node_level_actions()}.flatten
-
end
-
end
-
-
1
# Builds a Task from the given hash, applying default status/action_on_failure,
# and appends it to this task's subtasks. Returns the new subtask.
def add_subtask_from_hash(hash)
  defaults = {:status => "created", :action_on_failure => "abort"}
  # BUG FIX: the second constructor argument was the undefined local `c`,
  # which raises NameError whenever this method runs; pass this task's model
  # handle instead. NOTE(review): confirm Task.new's expected second argument.
  new_subtask = Task.new(defaults.merge(hash), model_handle())
  add_subtask(new_subtask)
end
-
-
1
def add_subtask(new_subtask)
-
(self[:subtasks] ||= Array.new) << new_subtask
-
new_subtask
-
end
-
-
1
# Appends each of the given subtasks, in order, to this task's subtask list.
# Returns the array that was passed in.
def add_subtasks(new_subtasks)
  new_subtasks.each do |new_subtask|
    self[:subtasks] ||= Array.new
    self[:subtasks] << new_subtask
  end
  new_subtasks
end
-
-
1
# Assigns 1-based :position values to this task (defaulting to 1) and,
# recursively, to every subtask in list order.
def set_positions!()
  self[:position] ||= 1
  children = subtasks()
  return nil if children.empty?
  children.each_with_index do |subtask, ndx|
    subtask[:position] = ndx + 1
    subtask.set_positions!()
  end
end
-
-
1
def set_and_ret_parents_and_children_status!(parent_id=nil)
-
self[:task_id] = parent_id
-
id = id()
-
if subtasks.empty?
-
[:parent_id => parent_id, :id => id, :children_status => nil]
-
else
-
recursive_subtasks = subtasks.map{|st|st.set_and_ret_parents_and_children_status!(id)}.flatten
-
children_status = subtasks.inject({}){|h,st|h.merge(st.id() => "created")}
-
[:parent_id => parent_id, :id => id, :children_status => children_status] + recursive_subtasks
-
end
-
end
-
-
1
# Flattens this task and all of its (recursive) subtasks into a single array,
# depth-first with self first.
def unroll_tasks()
  subtasks().inject([self]) { |acc, subtask| acc + subtask.unroll_tasks() }
end
-
-
#### for rending tasks
-
1
public
-
1
def render_form()
-
# may be different forms; this is one that is organized by node_group, node, component, attribute
-
task_list = render_form_flat(true)
-
# TODO: not yet teating node_group
-
Task.render_group_by_node(task_list)
-
end
-
-
1
protected
-
# protected, not private, because of recursive call
-
1
def render_form_flat(top=false)
-
# prune out all (sub)tasks except for top and executable
-
return render_executable_tasks() if executable_action(:no_error_if_nil=>true)
-
(top ? [render_top_task()] : []) + subtasks.map{|e|e.render_form_flat()}.flatten
-
end
-
-
1
private
-
1
def executable_action(opts={})
-
unless @executable_action ||= self[:executable_action]
-
raise Error.new("executable_action should not be null") unless opts[:no_error_if_nil]
-
end
-
@executable_action
-
end
-
-
1
def self.render_group_by_node(task_list)
-
return task_list if task_list.size < 2
-
ret = nil
-
indexed_nodes = Hash.new
-
task_list.each do |t|
-
if t[:level] == "top"
-
ret = t
-
elsif t[:level] == "node"
-
indexed_nodes[t[:node_id]] = t
-
end
-
end
-
task_list.each do |t|
-
if t[:level] == "node"
-
ret[:children] << t
-
elsif t[:level] == "component"
-
if indexed_nodes[t[:node_id]]
-
indexed_nodes[t[:node_id]][:children] << t
-
else
-
node_task = Task.render_task_on_node(:node_id => t[:node_id], :node_name => t[:node_name])
-
node_task[:children] << t
-
ret[:children] << node_task
-
indexed_nodes[node_task[:node_id]] = node_task
-
end
-
end
-
end
-
ret
-
end
-
-
1
def render_top_task()
-
{:task_id => id(),
-
:level => "top",
-
:type => "top",
-
:action_on_failure=> self[:action_on_failure],
-
:children => Array.new
-
}
-
end
-
-
1
def render_executable_tasks()
-
executable_action = executable_action()
-
sc = executable_action[:state_change_types]
-
common_vals = {
-
:task_id => id(),
-
:status => self[:status],
-
}
-
# order is important
-
if sc.include?("create_node") then Task.render_tasks_create_node(executable_action,common_vals)
-
elsif sc.include?("install_component") then Task.render_tasks_component_op("install_component",executable_action,common_vals)
-
elsif sc.include?("setting") then Task.render_tasks_setting(executable_action,common_vals)
-
elsif sc.include?("update_implementation") then Task.render_tasks_component_op("update_implementation",executable_action,common_vals)
-
elsif sc.include?("converge_component") then Task.render_tasks_component_op("converge_component",executable_action,common_vals)
-
else
-
Log.error("do not treat executable tasks of type(s) #{sc.join(',')}")
-
nil
-
end
-
end
-
-
1
# Renders a synthetic node-level task entry for grouping component tasks;
# node_info (e.g. :node_id, :node_name) is merged over the base fields.
def self.render_task_on_node(node_info)
  base = {
    :type => "on_node",
    :level => "node",
    :children => Array.new
  }
  base.merge(node_info)
end
-
-
1
# Renders a single "create_node" entry for the node targeted by the
# executable action, merged with the common display values (task_id, status).
def self.render_tasks_create_node(executable_action, common_vals)
  node = executable_action[:node]
  entry = {
    :type => "create_node",
    :level => "node",
    :node_id => node[:id],
    :node_name => node[:display_name],
    :children => Array.new
  }
  [entry.merge(common_vals)]
end
-
-
1
def self.render_tasks_component_op(type,executable_action,common_vals)
-
node = executable_action[:node]
-
executable_action.component_actions().map do |component_action|
-
component = component_action[:component]
-
cmp_attrs = {
-
:component_id => component[:id],
-
:component_name => component[:display_name]
-
}
-
task = {
-
:type => type,
-
:level => "component",
-
:node_id => node[:id],
-
:node_name => node[:display_name],
-
:component_basic_type => component[:basic_type]
-
}
-
task.merge!(cmp_attrs)
-
task.merge!(common_vals)
-
add_attributes_to_component_task!(task,component_action,cmp_attrs)
-
end
-
end
-
-
1
def self.render_tasks_setting(executable_action,common_vals)
-
node = executable_action[:node]
-
executable_action.component_actions().map do |component_action|
-
component = component_action[:component]
-
cmp_attrs = {
-
:component_id => component[:id],
-
:component_name => component[:display_name].gsub(/::/,"_")
-
}
-
task = {
-
:type => "on_component",
-
:level => "component",
-
:node_id => node[:id],
-
:node_name => node[:display_name],
-
:component_basic_type => component[:basic_type]
-
}
-
task.merge!(cmp_attrs)
-
task.merge!(common_vals)
-
add_attributes_to_component_task!(task,component_action,cmp_attrs)
-
end
-
end
-
-
1
def self.add_attributes_to_component_task!(task,component_action,cmp_attrs)
-
attributes = component_action[:attributes]
-
return task unless attributes
-
keep_ids = component_action[:changed_attribute_ids]
-
pruned_attrs = attributes.reject do |a|
-
a[:hidden] or (keep_ids and not keep_ids.include?(a[:id]))
-
end
-
flattten_attrs = AttributeComplexType.flatten_attribute_list(pruned_attrs)
-
flattten_attrs.each do |a|
-
val = a[:attribute_value]
-
if val.nil?
-
next unless a[:port_type] == "input" and a[:required]
-
val = "DYNAMICALLY SET"
-
end
-
attr_task = {
-
:type => "setting",
-
:level => "attribute",
-
:attribute_id => a[:id],
-
:attribute_name => a[:display_name],
-
:attribute_value => val,
-
:attribute_data_type => a[:data_type],
-
:attribute_required => a[:required],
-
:attribute_dynamic => a[:dynamic]
-
}
-
attr_task.merge!(cmp_attrs)
-
task[:children]||= Array.new
-
task[:children] << attr_task
-
end
-
task
-
end
-
end
-
end
-
-
-
2
module DTK; class Task
-
1
class Action < HashObject
-
1
def type()
-
Aux.underscore(Aux.demodulize(self.class.to_s)).to_sym
-
end
-
-
# implemented functions
-
1
def long_running?()
-
nil
-
end
-
-
# returns [adapter_type,adapter_name], adapter name optional in which it wil be looked up from config
-
1
def ret_command_and_control_adapter_info()
-
nil
-
end
-
-
1
class OnNode < self
-
1
def self.create_from_node(node)
-
state_change = {:node => node}
-
new(:state_change,state_change,nil)
-
end
-
1
def self.create_from_state_change(state_change,assembly_idh=nil)
-
new(:state_change,state_change,nil,assembly_idh)
-
end
-
1
def self.create_from_hash(task_action_type,hash,task_idh=nil)
-
case task_action_type
-
when "CreateNode" then CreateNode.new(:hash,hash,task_idh)
-
when "ConfigNode" then ConfigNode.new(:hash,hash,task_idh)
-
when "PowerOnNode" then PowerOnNode.new(:hash,hash,task_idh)
-
when "InstallAgent" then InstallAgent.new(:hash,hash,task_idh)
-
when "ExecuteSmoketest" then ExecuteSmoketest.new(:hash,hash,task_idh)
-
when "Hash" then InstallAgent.new(:hash,hash,task_idh) #RICH-WF; Aldin compensating form bug in task creation
-
else raise Error.new("Unexpected task_action_type (#{task_action_type})")
-
end
-
end
-
1
def self.task_action_type()
-
@task_action_type ||= to_s.split('::').last
-
end
-
1
def task_action_type()
-
self.class.task_action_type()
-
end
-
-
1
def initialize(type,hash,task_idh=nil)
-
unless hash[:node].kind_of?(Node)
-
hash[:node] &&= Node.create_from_model_handle(hash[:node],task_idh.createMH(:node),:subclass=>true)
-
end
-
super(hash)
-
end
-
-
###====== related to node(s); node can be a node group
-
1
def node_is_node_group?()
-
self[:node].is_node_group?()
-
end
-
-
1
def nodes()
-
node_or_ng = self[:node]
-
if node_or_ng.is_node_group?()
-
node_or_ng.get_node_group_members()
-
else
-
[node_or_ng]
-
end
-
end
-
-
1
def node_id()
-
self[:node][:id]
-
end
-
-
1
def get_and_propagate_dynamic_attributes(result,opts={})
-
dyn_attr_val_info = get_dynamic_attributes_with_retry(result,opts)
-
return if dyn_attr_val_info.empty?
-
attr_mh = self[:node].model_handle_with_auth_info(:attribute)
-
Attribute.update_and_propagate_dynamic_attributes(attr_mh,dyn_attr_val_info)
-
end
-
-
###====== end: related to node(s); node can be a node group
-
-
1
def attributes_to_set()
-
Array.new
-
end
-
-
# virtual gets overwritten
-
# updates object and the tasks in the model
-
1
def get_and_update_attributes!(task)
-
# raise "You need to implement 'get_and_update_attributes!' method for class #{self.class}"
-
end
-
-
# virtual gets overwritten
-
1
def add_internal_guards!(guards)
-
# raise "You need to implement 'add_internal_guards!' method for class #{self.class}"
-
end
-
-
1
def update_state_change_status_aux(task_mh,status,state_change_ids)
-
rows = state_change_ids.map{|id|{:id => id, :status => status.to_s}}
-
state_change_mh = task_mh.createMH(:state_change)
-
Model.update_from_rows(state_change_mh,rows)
-
end
-
-
1
private
-
1
def node_create_obj_optional_subclass(node)
-
node && node.create_obj_optional_subclass()
-
end
-
-
1
def get_dynamic_attributes_with_retry(result,opts={})
-
ret = get_dynamic_attributes(result)
-
if non_null_attrs = opts[:non_null_attributes]
-
ret = retry_get_dynamic_attributes(ret,non_null_attrs){get_dynamic_attributes(result)}
-
end
-
ret
-
end
-
-
1
# Re-invokes the supplied block until every attribute named in non_null_attrs
# carries a non-null value, sleeping RetrySleep seconds between attempts and
# raising Error after RetryMaxCount tries.
def retry_get_dynamic_attributes(dyn_attr_val_info, non_null_attrs, count=1, &block)
  return dyn_attr_val_info if values_non_null?(dyn_attr_val_info, non_null_attrs)
  if count > RetryMaxCount
    raise Error.new("cannot get all attributes with keys (#{non_null_attrs.join(",")})")
  end
  raise Error.new("Unexpected that block.nil?") if block.nil?
  sleep(RetrySleep)
  retry_get_dynamic_attributes(block.call(), non_null_attrs, count + 1, &block)
end
-
1
RetryMaxCount = 60
-
1
RetrySleep = 1
-
1
def values_non_null?(dyn_attr_val_info,keys)
-
keys.each do |k|
-
is_non_null = nil
-
if match = dyn_attr_val_info.find{|a|a[:display_name] == k}
-
if val = match[:attribute_value]
-
is_non_null = (val.kind_of?(Array) ? val.find{|el|el} : true)
-
end
-
end
-
return nil unless is_non_null
-
end
-
true
-
end
-
-
# generic; can be overwritten
-
1
def self.node_status(object,opts)
-
ret = PrettyPrintHash.new
-
node = object[:node]||{}
-
if name = node_status__name(node)
-
ret[:name] = name
-
end
-
if id = node[:id]
-
ret[:id] = id
-
end
-
ret
-
end
-
-
1
def self.node_status__name(node)
-
node && Node.assembly_node_print_form?(node)
-
end
-
-
end
-
-
1
class NodeLevel < OnNode
-
end
-
-
1
class PhysicalNode < self
-
1
def initialize(type,hash,task_idh=nil)
-
unless hash[:node].kind_of?(Node)
-
hash[:node] &&= Node.create_from_model_handle(hash[:node],task_idh.createMH(:node),:subclass=>true)
-
end
-
super(hash)
-
end
-
-
1
def self.create_from_physical_nodes(target, node)
-
node[:datacenter] = target
-
hash = {
-
:node => node,
-
:datacenter => target,
-
:user_object => CurrentSession.new.get_user_object()
-
}
-
-
InstallAgent.new(:hash,hash)
-
end
-
-
1
def self.create_smoketest_from_physical_nodes(target, node)
-
node[:datacenter] = target
-
hash = {
-
:node => node,
-
:datacenter => target,
-
:user_object => CurrentSession.new.get_user_object()
-
}
-
-
ExecuteSmoketest.new(:hash,hash)
-
end
-
-
# virtual gets overwritten
-
# updates object and the tasks in the model
-
1
def get_and_update_attributes!(task)
-
# raise "You need to implement 'get_and_update_attributes!' method for class #{self.class}"
-
end
-
-
# virtual gets overwritten
-
1
def add_internal_guards!(guards)
-
# raise "You need to implement 'add_internal_guards!' method for class #{self.class}"
-
end
-
end
-
-
-
1
r8_nested_require('action','create_node')
-
1
r8_nested_require('action','config_node')
-
1
r8_nested_require('action','on_component')
-
1
r8_nested_require('action','install_agent')
-
1
r8_nested_require('action','execute_smoketest')
-
-
1
class Result < HashObject
-
1
def initialize(hash={})
-
super(hash)
-
self[:result_type] = Aux.demodulize(self.class.to_s).downcase
-
end
-
-
1
class Succeeded < self
-
1
def initialize(hash={})
-
super(hash)
-
end
-
end
-
1
class Failed < self
-
1
def initialize(error)
-
super()
-
self[:error] = error.to_hash
-
end
-
end
-
1
class Cancelled < self
-
1
def initialize(hash={})
-
super(hash)
-
end
-
end
-
end
-
end
-
end; end
-
2
module DTK; class Task
-
1
class Action
-
1
class ConfigNode < OnNode
-
1
def self.create_from_execution_blocks(exec_blocks,assembly_idh=nil)
-
task_idh = nil #not needed in new
-
new(:execution_blocks,exec_blocks,task_idh,assembly_idh)
-
end
-
-
1
def create_node_group_member(node)
-
self.class.new(:hash,:node => node,:node_group_member => true)
-
end
-
-
1
def self.component_actions(obj)
-
obj[:component_actions]||[]
-
end
-
1
def component_actions()
-
self.class.component_actions(self)
-
end
-
-
1
def set_intra_node_stages!(intra_node_stages)
-
self[:intra_node_stages] = intra_node_stages
-
end
-
1
def intra_node_stages()
-
self[:intra_node_stages]
-
end
-
1
def set_inter_node_stage!(internode_stage_index)
-
self[:inter_node_stage] = internode_stage_index && internode_stage_index.to_s
-
end
-
1
def inter_node_stage()
-
self[:inter_node_stage]
-
end
-
1
def is_first_inter_node_stage?()
-
inter_node_stage = inter_node_stage()
-
inter_node_stage.nil? or inter_node_stage == "1"
-
end
-
-
1
def self.status(object,opts)
-
ret = PrettyPrintHash.new
-
ret[:node] = node_status(object,opts)
-
unless opts[:no_components]
-
ret[:components] = component_actions(object).map do |component_action|
-
OnComponent.status(component_action,opts)
-
end
-
end
-
ret
-
end
-
-
# for debugging
-
1
def self.pretty_print_hash(object)
-
ret = PrettyPrintHash.new
-
ret[:node] = (object[:node]||{})[:display_name]
-
ret[:component_actions] = component_actions(object).map do |component_action|
-
OnComponent.pretty_print_hash(component_action)
-
end
-
ret
-
end
-
-
1
def long_running?()
-
true
-
end
-
-
1
def get_dynamic_attributes(result)
-
ret = Array.new
-
dyn_attrs = (result[:data]||{})[:dynamic_attributes]
-
return ret if dyn_attrs.nil? or dyn_attrs.empty?
-
dyn_attrs.map{|a|{:id => a[:attribute_id], :attribute_value => a[:attribute_val]}}
-
end
-
-
1
def self.add_attributes!(attr_mh,action_list)
-
# ndx_actions values is an array of actions to handel case wheer component on node group and multiple nodes refernce it
-
ndx_actions = Hash.new
-
action_list.each do |config_node_action|
-
component_actions(config_node_action).each do |a|
-
(ndx_actions[a[:component][:id]] ||= Array.new) << a
-
end
-
end
-
return nil if ndx_actions.empty?
-
-
parent_field_name = DB.parent_field(:component,:attribute)
-
sp_hash = {
-
:relation => :attribute,
-
:filter => [:oneof, parent_field_name, ndx_actions.keys],
-
:columns => [:id,:display_name,parent_field_name,:external_ref,:attribute_value,:required,:dynamic,:port_type,:port_is_external, :data_type, :semantic_type, :hidden]
-
}
-
attrs = Model.get_objs(attr_mh,sp_hash)
-
-
attrs.each do |attr|
-
unless attr.is_constant?()
-
actions = ndx_actions[attr[parent_field_name]]
-
actions.each{|action|action.add_attribute!(attr)}
-
end
-
end
-
end
-
-
1
def add_internal_guards!(guards)
-
self[:internal_guards] = guards
-
end
-
-
1
def get_and_update_attributes!(task)
-
task_mh = task.model_handle()
-
# these two below update the ruby obj
-
get_and_update_attributes__node_ext_ref!(task_mh)
-
get_and_update_attributes__cmp_attrs!(task_mh)
-
get_and_update_attributes__assembly_attrs!(task_mh)
-
# this updates the task model
-
update_bound_input_attrs!(task)
-
end
-
-
1
def get_and_update_attributes__node_ext_ref!(task_mh)
-
# TODO: may treat updating node as regular attribute
-
# no up if already have the node's external ref
-
unless ((self[:node]||{})[:external_ref]||{})[:instance_id]
-
node_id = (self[:node]||{})[:id]
-
if node_id
-
node_info = Model.get_object_columns(task_mh.createIDH(:id => node_id, :model_name => :node),[:external_ref])
-
self[:node][:external_ref] = node_info[:external_ref]
-
else
-
Log.error("cannot update task action's node id because do not have its id")
-
end
-
end
-
end
-
-
1
def get_and_update_attributes__assembly_attrs!(task_mh)
-
if assembly = self[:assembly_idh] && IDHandle.new(self[:assembly_idh]).create_object(:model_name => :assembly)
-
assembly_attr_vals = assembly.get_assembly_level_attributes()
-
unless assembly_attr_vals.empty?
-
self[:assembly_attributes] = assembly_attr_vals
-
end
-
end
-
end
-
-
1
def get_and_update_attributes__cmp_attrs!(task_mh)
-
# find attributes that can be updated
-
# TODO: right now being conservative in including attributes that may not need to be set
-
indexed_attrs_to_update = Hash.new
-
component_actions().each do |action|
-
(action[:attributes]||[]).each do |attr|
-
# TODO: more efficient to just get attributes that can be inputs; right now :is_port does not
-
# reflect this in cases for a3 in example a1 -external -> a2 -internal -> a3
-
# so commenting out below and replacing with less stringent
-
# if attr[:is_port] and not attr[:value_asserted]
-
if not attr[:value_asserted]
-
indexed_attrs_to_update[attr[:id]] = attr
-
end
-
end
-
end
-
return if indexed_attrs_to_update.empty?
-
sp_hash = {
-
:relation => :attribute,
-
:filter => [:and,[:oneof, :id, indexed_attrs_to_update.keys]],
-
:columns => [:id,:value_derived]
-
}
-
new_attr_vals = Model.get_objs(task_mh.createMH(:model_name => :attribute),sp_hash)
-
new_attr_vals.each do |a|
-
attr = indexed_attrs_to_update[a[:id]]
-
attr[:value_derived] = a[:value_derived]
-
end
-
end
-
1
def update_bound_input_attrs!(task)
-
bound_input_attrs = component_actions().map do |action|
-
(action[:attributes]||[]).map do |attr|
-
{
-
:component_display_name => action[:component][:display_name],
-
:attribute_display_name => attr[:display_name],
-
:attribute_value => attr[:attribute_value]
-
}
-
end
-
end.flatten(1)
-
task.update(:bound_input_attrs => bound_input_attrs)
-
end
-
-
-
1
def ruby_function_implementation?()
-
component_actions = self[:component_actions]||[]
-
unless component_actions.empty?
-
# check taht all elements have ruby function type
-
!component_actions.find{|a|'ruby_function' != ((a[:component]||{})[:external_ref]||{})[:type]}
-
end
-
end
-
-
1
def assembly_wide_component?()
-
if node_type = (self[:node]||{})[:type]
-
node_type.eql?(AssemblyWideNodename)
-
end
-
end
-
1
AssemblyWideNodename = 'assembly_wide'
-
-
# returns [adapter_type,adapter_name]
-
# adapter_name can be null-> default is used
-
1
def ret_command_and_control_adapter_info()
-
adapter_type = :node_config
-
adapter_name =
-
if assembly_wide_component?()
-
# adapter_name indicating toe xecute on server, rather than dispatching to a node
-
:server
-
end
-
[adapter_type,adapter_name]
-
end
-
-
1
def update_state_change_status(task_mh,status)
-
update_state_change_status_aux(task_mh,status,component_actions().map{|x|x[:state_change_pointer_ids]}.compact.flatten)
-
end
-
-
1
def config_agent_type()
-
self[:config_agent_type] || raise(Error.new("self[:config_agent_type] should not be nil"))
-
end
-
-
1
private
-
1
def initialize(type,object,task_idh=nil,assembly_idh=nil)
-
# TODO: clean up so dont have to look for assembly_idh in two places
-
assembly_idh ||= object[:assembly_idh]
-
intra_node_stages = hash = nil
-
case type
-
when :state_change
-
sc = object
-
sample_state_change = sc.first
-
node = sample_state_change[:node]
-
component_actions,intra_node_stages = OnComponent.order_and_group_by_component(sc)
-
hash = {
-
:node => node,
-
:state_change_types => sc.map{|sc|sc[:type]}.uniq,
-
:config_agent_type => sc.first.on_node_config_agent_type,
-
:component_actions => component_actions
-
}
-
hash.merge!(:assembly_idh => assembly_idh) if assembly_idh
-
when :hash
-
if component_actions = object[:component_actions]
-
component_actions.each_with_index{|ca,i|component_actions[i] = OnComponent.create_from_hash(ca,task_idh)}
-
end
-
hash = object
-
when :execution_blocks
-
exec_blocks = object
-
actions,config_agent_type = OnComponent.create_actions_from_execution_blocks(exec_blocks)
-
hash = {
-
:node => exec_blocks.node(),
-
:state_change_types => ["converge_component"],
-
:config_agent_type => config_agent_type,
-
:component_actions => actions
-
}
-
hash.merge!(:assembly_idh => assembly_idh) if assembly_idh
-
intra_node_stages = exec_blocks.intra_node_stages()
-
else
-
raise Error.new("Unexpected ConfigNode.initialize type")
-
end
-
super(type,hash,task_idh)
-
# set_intra_node_stages must be done after super
-
set_intra_node_stages!(intra_node_stages) if intra_node_stages
-
end
-
end
-
end
-
end; end
-
2
module DTK; class Task
-
1
class Action
-
1
class CreateNode < NodeLevel
-
1
def initialize(type,object,task_idh=nil,assembly_idh=nil)
-
hash =
-
case type
-
when :state_change
-
{
-
:state_change_id => object[:id],
-
:state_change_types => [object[:type]],
-
:attributes => Array.new,
-
:node => node_create_obj_optional_subclass(object[:node]),
-
:datacenter => object[:datacenter],
-
:user_object => CurrentSession.new.get_user_object()
-
}
-
when :hash
-
object
-
else
-
raise Error.new("Unexpected CreateNode.initialize type")
-
end
-
super(type,hash,task_idh)
-
end
-
1
private :initialize
-
-
1
def base_node()
-
self[:node]
-
end
-
1
def target()
-
target_hash_or_obj = self[:datacenter]
-
if target_hash_or_obj.kind_of?(Target)
-
target_hash_or_obj
-
else
-
target_hash = target_hash_or_obj
-
target_idh = base_node.model_handle(:target_instance).createIDH(:id => target_hash[:id])
-
target_idh.create_object(target_hash)
-
end
-
end
-
-
1
def self.stage_display_name()
-
"create_nodes_stage"
-
end
-
1
def self.task_display_name()
-
"create_node"
-
end
-
-
1
def self.status(object,opts)
-
ret = PrettyPrintHash.new
-
ret[:node] = node_status(object,opts)
-
ret
-
end
-
-
# for debugging
-
1
def self.pretty_print_hash(object)
-
ret = PrettyPrintHash.new
-
ret[:node] = (object[:node]||{})[:display_name]
-
ret
-
end
-
-
1
def get_dynamic_attributes(result)
-
ret = Array.new
-
attrs_to_set = attributes_to_set()
-
attr_names = attrs_to_set.map{|a|a[:display_name].to_sym}
-
av_pairs__node_components = get_dynamic_attributes__node_components!(attr_names)
-
rest_av_pairs = (attr_names.empty? ? {} : CommandAndControl.get_and_update_node_state!(self[:node],attr_names))
-
av_pairs = av_pairs__node_components.merge(rest_av_pairs)
-
return ret if av_pairs.empty?
-
attrs_to_set.each do |attr|
-
attr_name = attr[:display_name].to_sym
-
# TODO: can test and case here whether value changes such as wehetehr new ip address
-
attr[:attribute_value] = av_pairs[attr_name] if av_pairs.has_key?(attr_name)
-
ret << attr
-
end
-
ret
-
end
-
-
###special processing for node_components
-
1
def get_dynamic_attributes__node_components!(attr_names)
-
ret = Hash.new
-
return ret unless attr_names.delete(:node_components)
-
# TODO: hack
-
ipv4_val = CommandAndControl.get_and_update_node_state!(self[:node],[:host_addresses_ipv4])
-
return ret if ipv4_val.empty?
-
cmps = self[:node].get_objs(:cols => [:components]).map{|r|r[:component][:display_name].gsub("__","::")}
-
ret = {:node_components => {ipv4_val.values.first[0] => cmps}}
-
if attr_names.delete(:host_addresses_ipv4)
-
ret.merge!(ipv4_val)
-
end
-
ret
-
end
-
-
1
def add_attribute!(attr)
-
self[:attributes] << attr
-
end
-
-
1
def attributes_to_set()
-
self[:attributes].reject{|a| not a[:dynamic]}
-
end
-
-
1
def ret_command_and_control_adapter_info()
-
[:iaas,R8::Config[:command_and_control][:iaas][:type].to_sym]
-
end
-
-
1
def update_state_change_status(task_mh,status)
-
# no op if no associated state change
-
if self[:state_change_id]
-
update_state_change_status_aux(task_mh,status,[self[:state_change_id]])
-
end
-
end
-
-
1
def self.add_attributes!(attr_mh,action_list)
-
ndx_actions = Hash.new
-
action_list.each{|a|ndx_actions[a[:node][:id]] = a}
-
return nil if ndx_actions.empty?
-
-
parent_field_name = DB.parent_field(:node,:attribute)
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,parent_field_name,:external_ref,:attribute_value,:required,:dynamic],
-
:filter => [:and,
-
[:eq, :dynamic, true],
-
[:oneof, parent_field_name, ndx_actions.keys]]
-
}
-
-
attrs = Model.get_objs(attr_mh,sp_hash)
-
-
attrs.each do |attr|
-
action = ndx_actions[attr[parent_field_name]]
-
action.add_attribute!(attr)
-
end
-
end
-
-
1
def create_node_config_agent_type
-
self[:config_agent_type]
-
end
-
-
1
private
-
1
def self.node_status(object,opts)
-
node = object[:node]||{}
-
ext_ref = node[:external_ref]||{}
-
kv_array =
-
[{:name => node_status__name(node)},
-
{:id => node[:id]},
-
{:type => ext_ref[:type]},
-
{:image_id => ext_ref[:image_id]},
-
{:size => ext_ref[:size]},
-
]
-
PrettyPrintHash.new.set?(*kv_array)
-
end
-
end
-
-
##
-
# Class we are using to execute code which is responsible for handling Node
-
# when she moves from pending state to running state.
-
##
-
# TODO: move common fns to NodeLevel and then have this inherit to NodeLevel
-
1
class PowerOnNode < CreateNode
-
1
def self.stage_display_name()
-
"power_on_nodes_stage"
-
end
-
1
def self.task_display_name()
-
"power_on_node"
-
end
-
end
-
end
-
end; end
-
-
2
module DTK; class Task
-
1
class Action
-
1
class ExecuteSmoketest < PhysicalNode
-
1
def initialize(type,object,task_idh=nil,assembly_idh=nil)
-
hash =
-
case type
-
when :state_change
-
{
-
:state_change_id => object[:id],
-
:state_change_types => [object[:type]],
-
:attributes => Array.new,
-
:node => node_create_obj_optional_subclass(object[:node]),
-
:datacenter => object[:datacenter],
-
:user_object => CurrentSession.new.get_user_object()
-
}
-
when :hash
-
object
-
else
-
raise Error.new("Unexpected InstallAgent.initialize type")
-
end
-
super(type,hash,task_idh)
-
end
-
1
private :initialize
-
-
1
def self.status(object,opts)
-
ret = PrettyPrintHash.new
-
ret[:node] = node_status(object,opts)
-
ret
-
end
-
-
#for debugging
-
1
def self.pretty_print_hash(object)
-
ret = PrettyPrintHash.new
-
ret[:node] = (object[:node]||{})[:display_name]
-
ret
-
end
-
-
1
def get_dynamic_attributes(result)
-
ret = Array.new
-
# attrs_to_set = attributes_to_set()
-
# attr_names = attrs_to_set.map{|a|a[:display_name].to_sym}
-
# av_pairs__node_components = get_dynamic_attributes__node_components!(attr_names)
-
# rest_av_pairs = (attr_names.empty? ? {} : CommandAndControl.get_and_update_node_state!(self[:node],attr_names))
-
# av_pairs = av_pairs__node_components.merge(rest_av_pairs)
-
# return ret if av_pairs.empty?
-
# attrs_to_set.each do |attr|
-
# attr_name = attr[:display_name].to_sym
-
# #TODO: can test and case here whether value changes such as wehetehr new ip address
-
# attr[:attribute_value] = av_pairs[attr_name] if av_pairs.has_key?(attr_name)
-
# ret << attr
-
# end
-
ret
-
end
-
-
###special processing for node_components
-
1
def get_dynamic_attributes__node_components!(attr_names)
-
ret = Hash.new
-
# return ret unless attr_names.delete(:node_components)
-
# #TODO: hack
-
# ipv4_val = CommandAndControl.get_and_update_node_state!(self[:node],[:host_addresses_ipv4])
-
# return ret if ipv4_val.empty?
-
# cmps = self[:node].get_objs(:cols => [:components]).map{|r|r[:component][:display_name].gsub("__","::")}
-
# ret = {:node_components => {ipv4_val.values.first[0] => cmps}}
-
# if attr_names.delete(:host_addresses_ipv4)
-
# ret.merge!(ipv4_val)
-
# end
-
ret
-
end
-
-
1
def add_attribute!(attr)
-
self[:attributes] << attr
-
end
-
-
1
def attributes_to_set()
-
self[:attributes].reject{|a| not a[:dynamic]}
-
end
-
-
1
def ret_command_and_control_adapter_info()
-
[:node_config,:smoketest]
-
# [:iaas,R8::Config[:command_and_control][:iaas][:type].to_sym]
-
end
-
-
1
def update_state_change_status(task_mh,status)
-
#no op if no associated state change
-
if self[:state_change_id]
-
update_state_change_status_aux(task_mh,status,[self[:state_change_id]])
-
end
-
end
-
-
1
def self.add_attributes!(attr_mh,action_list)
-
ndx_actions = Hash.new
-
action_list.each{|a|ndx_actions[a[:node][:id]] = a}
-
return nil if ndx_actions.empty?
-
-
parent_field_name = DB.parent_field(:node,:attribute)
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,parent_field_name,:external_ref,:attribute_value,:required,:dynamic],
-
:filter => [:and,
-
[:eq, :dynamic, true],
-
[:oneof, parent_field_name, ndx_actions.keys]]
-
}
-
-
attrs = Model.get_objs(attr_mh,sp_hash)
-
-
attrs.each do |attr|
-
action = ndx_actions[attr[parent_field_name]]
-
action.add_attribute!(attr)
-
end
-
end
-
-
1
def create_node_config_agent_type
-
self[:config_agent_type]
-
end
-
-
1
private
-
1
def self.node_status(object,opts)
-
node = object[:node]||{}
-
ext_ref = node[:external_ref]||{}
-
kv_array =
-
[{:name => node_status__name(node)},
-
{:id => node[:id]},
-
{:type => ext_ref[:type]},
-
{:image_id => ext_ref[:image_id]},
-
{:size => ext_ref[:size]},
-
]
-
PrettyPrintHash.new.set?(*kv_array)
-
end
-
end
-
end
-
end; end
-
2
module DTK; class Task
-
1
class Action
-
1
class InstallAgent < PhysicalNode
-
1
def initialize(type,object,task_idh=nil,assembly_idh=nil)
-
hash =
-
case type
-
when :state_change
-
{
-
:state_change_id => object[:id],
-
:state_change_types => [object[:type]],
-
:attributes => Array.new,
-
:node => node_create_obj_optional_subclass(object[:node]),
-
:datacenter => object[:datacenter],
-
:user_object => CurrentSession.new.get_user_object()
-
}
-
when :hash
-
object
-
else
-
raise Error.new("Unexpected InstallAgent.initialize type")
-
end
-
super(type,hash,task_idh)
-
end
-
1
private :initialize
-
-
1
def self.status(object,opts)
-
ret = PrettyPrintHash.new
-
ret[:node] = node_status(object,opts)
-
ret
-
end
-
-
# for debugging
-
1
def self.pretty_print_hash(object)
-
ret = PrettyPrintHash.new
-
ret[:node] = (object[:node]||{})[:display_name]
-
ret
-
end
-
-
1
def get_dynamic_attributes(result)
-
ret = Array.new
-
# attrs_to_set = attributes_to_set()
-
# attr_names = attrs_to_set.map{|a|a[:display_name].to_sym}
-
# av_pairs__node_components = get_dynamic_attributes__node_components!(attr_names)
-
# rest_av_pairs = (attr_names.empty? ? {} : CommandAndControl.get_and_update_node_state!(self[:node],attr_names))
-
# av_pairs = av_pairs__node_components.merge(rest_av_pairs)
-
# return ret if av_pairs.empty?
-
# attrs_to_set.each do |attr|
-
# attr_name = attr[:display_name].to_sym
-
# #TODO: can test and case here whether value changes such as wehetehr new ip address
-
# attr[:attribute_value] = av_pairs[attr_name] if av_pairs.has_key?(attr_name)
-
# ret << attr
-
# end
-
ret
-
end
-
-
###special processing for node_components
-
1
def get_dynamic_attributes__node_components!(attr_names)
-
ret = Hash.new
-
# return ret unless attr_names.delete(:node_components)
-
# #TODO: hack
-
# ipv4_val = CommandAndControl.get_and_update_node_state!(self[:node],[:host_addresses_ipv4])
-
# return ret if ipv4_val.empty?
-
# cmps = self[:node].get_objs(:cols => [:components]).map{|r|r[:component][:display_name].gsub("__","::")}
-
# ret = {:node_components => {ipv4_val.values.first[0] => cmps}}
-
# if attr_names.delete(:host_addresses_ipv4)
-
# ret.merge!(ipv4_val)
-
# end
-
ret
-
end
-
-
1
def add_attribute!(attr)
-
self[:attributes] << attr
-
end
-
-
1
def attributes_to_set()
-
self[:attributes].reject{|a| not a[:dynamic]}
-
end
-
-
1
def ret_command_and_control_adapter_info()
-
[:node_config,:ssh]
-
# [:iaas,R8::Config[:command_and_control][:iaas][:type].to_sym]
-
end
-
-
1
def update_state_change_status(task_mh,status)
-
# no op if no associated state change
-
if self[:state_change_id]
-
update_state_change_status_aux(task_mh,status,[self[:state_change_id]])
-
end
-
end
-
-
1
def self.add_attributes!(attr_mh,action_list)
-
ndx_actions = Hash.new
-
action_list.each{|a|ndx_actions[a[:node][:id]] = a}
-
return nil if ndx_actions.empty?
-
-
parent_field_name = DB.parent_field(:node,:attribute)
-
sp_hash = {
-
:cols => [:id,:group_id,:display_name,parent_field_name,:external_ref,:attribute_value,:required,:dynamic],
-
:filter => [:and,
-
[:eq, :dynamic, true],
-
[:oneof, parent_field_name, ndx_actions.keys]]
-
}
-
-
attrs = Model.get_objs(attr_mh,sp_hash)
-
-
attrs.each do |attr|
-
action = ndx_actions[attr[parent_field_name]]
-
action.add_attribute!(attr)
-
end
-
end
-
-
1
def create_node_config_agent_type
-
self[:config_agent_type]
-
end
-
-
1
private
-
1
def self.node_status(object,opts)
-
node = object[:node]||{}
-
ext_ref = node[:external_ref]||{}
-
kv_array =
-
[{:name => node_status__name(node)},
-
{:id => node[:id]},
-
{:type => ext_ref[:type]},
-
{:image_id => ext_ref[:image_id]},
-
{:size => ext_ref[:size]},
-
]
-
PrettyPrintHash.new.set?(*kv_array)
-
end
-
end
-
end
-
end; end
-
2
module DTK; class Task
-
1
class Action
-
1
class OnComponent < HashObject
-
1
def self.status(object,opts)
-
if opts[:no_attributes]
-
component_name(object)
-
else
-
ret = PrettyPrintHash.new
-
ret[:component] = component_status(object,opts)
-
ret[:attributes] = attributes_status(object,opts) unless opts[:no_attributes]
-
ret
-
end
-
end
-
-
1
def action_def()
-
ret = nil
-
component = self[:component]
-
unless action_def_ref = self[:action_method]
-
Log.error("Component Action with following component id #{component[:id].to_s} has no action_method")
-
return ret
-
end
-
sp_hash = {
-
:cols => [:id,:method_name,:content],
-
:filter => [:eq,:id,action_def_ref[:action_def_id]]
-
}
-
action_def_mh = component.id_handle().create_childMH(:action_def)
-
action_defs = Model.get_objs(action_def_mh,sp_hash)
-
if action_defs.empty?
-
Log.error("Cannot find action def that matches with ref (#{action_def_ref.inspect})")
-
nil
-
else
-
action_defs.first
-
end
-
end
-
-
-
# for debugging
-
1
def self.pretty_print_hash(object)
-
ret = PrettyPrintHash.new
-
ret[:component] = (object[:component]||{})[:display_name]
-
-
# TODO: should get attribute values from attribute object since task info can be stale
-
-
ret[:attributes] = (object[:attributes]||[]).map do |attr|
-
ret_attr = PrettyPrintHash.new
-
ret_attr.add(attr,:display_name,:value_asserted,:value_derived)
-
end
-
ret
-
end
-
1
def self.create_from_hash(hash,task_idh=nil)
-
if component = hash[:component]
-
unless component.kind_of?(Component)
-
unless task_idh
-
raise Error.new("If hash[:component] is not of type Component then task_idh must be supplied")
-
end
-
hash[:component] = Component.create_from_model_handle(component,task_idh.createMH(:component))
-
end
-
end
-
if attrs = hash[:attributes]
-
unless attrs.empty?
-
attr_mh = task_idh.createMH(:attribute)
-
attrs.each_with_index{|attr,i|attrs[i] = Attribute.create_from_model_handle(attr,attr_mh)}
-
end
-
end
-
new(hash)
-
end
-
-
# returns component_actions,intra_node_stages
-
1
def self.order_and_group_by_component(state_change_list)
-
intra_node_stages = nil
-
ndx_cmp_idhs = Hash.new
-
state_change_list.each do |sc|
-
cmp = sc[:component]
-
ndx_cmp_idhs[cmp[:id]] ||= cmp.id_handle()
-
end
-
components = Component::Instance.get_components_with_dependency_info(ndx_cmp_idhs.values)
-
cmp_deps = ComponentOrder.get_ndx_cmp_type_and_derived_order(components)
-
if Workflow.intra_node_stages?
-
cmp_order,intra_node_stages = get_intra_node_stages(cmp_deps, state_change_list)
-
elsif Workflow.intra_node_total_order?
-
node = state_change_list.first[:node]
-
cmp_order = get_total_component_order(cmp_deps, node)
-
else
-
raise Error.new("No intra node ordering strategy found")
-
end
-
component_actions = cmp_order.map do |(component_id,deps)|
-
create_from_state_change(state_change_list.select{|a|a[:component][:id] == component_id},deps)
-
end
-
[component_actions,intra_node_stages]
-
end
-
-
# returns cmp_ids_with_deps,intra_node_stages
-
1
def self.get_intra_node_stages(cmp_deps, state_change_list)
-
cmp_ids_with_deps = get_cmp_ids_with_deps(cmp_deps).clone
-
cd_ppt_stgs, scl_ppt_stgs = Stage::PuppetStageGenerator.generate_stages(cmp_ids_with_deps.dup, state_change_list.dup)
-
intra_node_stages = Array.new
-
cd_ppt_stgs.each_with_index do |cd, i|
-
cmp_ids_with_deps_ps = cd_ppt_stgs[i].dup
-
state_change_list_ps = scl_ppt_stgs[i]
-
intranode_stages_with_deps = Stage::IntraNode.generate_stages(cmp_ids_with_deps_ps, state_change_list_ps)
-
intra_node_stages << intranode_stages_with_deps.map{|stage|stage.keys }
-
end
-
# Amar: to enable multiple puppet calls inside one puppet_apply agent call,
-
# puppet_stages are added to intra node stages. Check PuppetStageGenerator class docs for more details
-
[cmp_ids_with_deps,intra_node_stages]
-
end
-
-
# Amar
-
# Return order from node table if order is consistent, otherwise generate order through TSort and update order in table
-
1
def self.get_total_component_order(cmp_deps, node)
-
cmp_ids_with_deps = get_cmp_ids_with_deps(cmp_deps)
-
# Get order from DB
-
cmp_order = node.get_ordered_component_ids()
-
# return if consistent
-
return cmp_order if is_total_order_consistent?(cmp_ids_with_deps, cmp_order)
-
-
# generate order via TSort
-
cmp_order = generate_component_order(cmp_ids_with_deps)
-
# update order in node table
-
node.update_ordered_component_ids(cmp_order)
-
return cmp_order
-
end
-
-
# Amar: Checking if existing order in node table is consistent
-
1
def self.is_total_order_consistent?(cmp_ids_with_deps, order)
-
return false if order.empty?
-
return false unless cmp_ids_with_deps.keys.sort == order.sort
-
begin
-
cmp_ids_with_deps.map do |parent, children|
-
unless children.empty?
-
children.each do |child|
-
return false if order.index(child) > order.index(parent) # inconsistent if any child order is after parent
-
end
-
end
-
end
-
rescue Exception => e
-
return false
-
end
-
return true
-
end
-
-
1
def self.get_cmp_ids_with_deps(cmp_deps)
-
# TODO: assumption that only a singleton component can be a dependency -> match on component_type sufficient
-
# first build index from component_type to id
-
cmp_type_to_id = Hash.new
-
cmp_deps.each do |id,info|
-
info[:component_dependencies].each do |ct|
-
unless cmp_type_to_id.has_key?(ct)
-
cmp_type_to_id[ct] = (cmp_deps.find{|id_x,info_x|info_x[:component_type] == ct}||[]).first
-
end
-
end
-
end
-
-
# note: dependencies can be omitted if they have already successfully completed; therefore only
-
# looking for non-null deps
-
cmp_ids_with_deps = cmp_deps.inject({}) do |h,(id,info)|
-
non_null_deps = info[:component_dependencies].map{|ct|cmp_type_to_id[ct]}.compact
-
h.merge(id => non_null_deps)
-
end
-
return cmp_ids_with_deps.nil? ? {} : cmp_ids_with_deps
-
end
-
-
# returns array of form [component_id,deps]
-
1
def self.generate_component_order(cmp_ids_with_deps)
-
ordered_cmp_ids = TSortHash.new(cmp_ids_with_deps).tsort
-
ordered_cmp_ids.map do |cmp_id|
-
[cmp_id,cmp_ids_with_deps[cmp_id]]
-
end
-
ordered_cmp_ids
-
end
-
-
1
def add_attribute!(attr)
-
self[:attributes] << attr
-
end
-
-
1
private
-
-
1
def self.component_name(object)
-
ret = (object[:component]||{})[:display_name]
-
ret && ret.gsub(/__/,"::")
-
end
-
-
1
def self.component_status(object,opts)
-
ret = PrettyPrintHash.new
-
if name = component_name(object)
-
ret[:name] = name
-
end
-
component = object[:component]||{}
-
if id = component[:id]
-
ret[:id] = id
-
end
-
ret
-
end
-
-
1
def self.attributes_status(object,opts)
-
# need to query db to get up to date values
-
(object[:attributes]||[]).map do |attr|
-
ret_attr = PrettyPrintHash.new
-
ret_attr[:name] = attr[:display_name]
-
ret_attr[:id] = attr[:id]
-
ret_attr[:value] = attr[:value_asserted]||attr[:value_derived]
-
ret_attr
-
end
-
end
-
-
# returns [actions,config_agent_type]
-
1
def self.create_actions_from_execution_blocks(exec_blocks)
-
actions = Array.new
-
cmps_info = exec_blocks.components_hash_with(:action_methods=>true)
-
config_agent_type = config_agent_type(cmps_info)
-
cmps_info.each do |cmp_hash|
-
cmp = cmp_hash[:component]
-
action_method = cmp_hash[:action_method] # can be nil
-
config_agent_type = (action_method||cmp).config_agent_type
-
hash = {
-
:attributes => Array.new,
-
:component => cmp
-
}
-
if action_method
-
hash.merge!(:action_method => action_method)
-
end
-
actions << new(hash)
-
end
-
[actions,config_agent_type]
-
end
-
1
def self.config_agent_type(cmps_info)
-
ca_types = cmps_info.map do |cmp_hash|
-
cmp = cmp_hash[:component]
-
action_method = cmp_hash[:action_method] # can be nil
-
(action_method||cmp).config_agent_type
-
end.uniq
-
if ca_types.find{|r|r.nil?}
-
raise Error.new("Unexpected that nil is in config_agent_types: #{ca_types.inspect}")
-
end
-
-
if ca_types.size == 1
-
ca_types.first
-
elsif ca_types.empty?
-
ConfigAgent::Type.default_symbol()
-
else
-
raise ErrorUsage.new("Actions with different providers (#{ca_types.join(',')}) cannot be in the same workflow stage")
-
end
-
end
-
1
private_class_method :config_agent_type
-
-
1
def self.create_from_state_change(scs_same_cmp,deps)
-
state_change = scs_same_cmp.first
-
# TODO: may deprecate need for ||[sc[:id]
-
pointer_ids = scs_same_cmp.map{|sc|sc[:linked_ids]||[sc[:id]]}.flatten.compact
-
hash = {
-
:state_change_pointer_ids => pointer_ids, #this field used to update teh coorepdonsing state change after thsi action is run
-
:attributes => Array.new,
-
:component => state_change[:component]
-
}
-
hash.merge!(:component_dependencies => deps) if deps
-
-
# TODO: can get more sophsiticated and handle case where some components installed and other are incremental
-
incremental_change = !scs_same_cmp.find{|sc|not sc[:type] == "setting"}
-
if incremental_change
-
hash.merge!(:changed_attribute_ids => scs_same_cmp.map{|sc|sc[:attribute_id]})
-
end
-
new(hash)
-
end
-
end
-
end
-
end; end
-
2
module DTK; class Task
  module ActionResults
    module Mixin
      # Persists the per-node results of an executed action as task_log rows
      # attached to this task. Logs and returns nil when no result data can
      # be extracted from the raw result.
      def add_action_results(result,action)
        unless action_results = CommandAndControl.node_action_results(result,action)
          Log.error_pp(["Unexpected that cannot find data in results:",result])
          return
        end
        # TODO: using task logs for storage; might introduce a new table
        rows = Array.new
        action_results.each_with_index do |action_result,pos|
          # label encodes this task's position in the task hierarchy (e.g. "0.2")
          label = QualifiedIndex.string_form(self)
          el = {
            :content => action_result,
            :ref => "task_log-#{label}--#{pos.to_s}",
            :task_id => id(),
            :display_name => label,
            :position => pos
          }
          rows << el
        end
        Model.create_from_rows(child_model_handle(:task_log),rows,{:convert => true})
      end
    end

    # Returns a formatted text report (RUN/STATUS/STDOUT/STDERR sections) of
    # the log entries recorded under action_label for the most recent
    # top-level task of the given assembly.
    # Raises ErrorUsage when the assembly has no tasks or no matching entries.
    def self.get_action_detail(assembly, action_label, opts={})
      ret = ''
      task_mh = assembly.model_handle(:task)
      unless task = Task.get_top_level_most_recent_task(task_mh,[:eq, :assembly_id, assembly.id()])
        raise ErrorUsage.new("No tasks found for '#{assembly.display_name_print_form()}'")
      end
      # TODO: more efficient would be one call that filters on action_label in get_all_subtasks_with_logs()
      subtasks = task.get_all_subtasks_with_logs()
      task_log_mh = task_mh.createMH(:task_log)
      sp_hash = {
        :cols => [:id, :display_name, :content, :position],
        :filter => [:and, [:eq, :display_name, action_label], [:oneof, :task_id, subtasks.map{|t|t.id()}]]
      }

      log_entries = Model.get_objs(task_log_mh,sp_hash)
      if log_entries.empty?
        raise ErrorUsage.new("Task action with identifier '#{action_label}' does not exist for this service instance.")
      end
      # entries without a position sort first (treated as 0)
      ordered_log_entries = log_entries.sort{|a,b|(a[:position]||0) <=> (b[:position]||0)}
      ordered_log_entries.each do |l|
        content = l[:content]
        ret << "==============================================================\n"
        ret << "RUN: #{content[:description]} \n"
        ret << "STATUS: #{content[:status]} \n"
        ret << "STDOUT: #{content[:stdout]}\n\n" if content[:stdout] && !content[:stdout].empty?
        ret << "STDERR: #{content[:stderr]} \n" if content[:stderr] && !content[:stderr].empty?
      end
      ret
    end

  end
end; end
-
#TODO: clean this file up; much cut and patse. moving methods we want to keep towards the top
-
2
module DTK; class Task
-
1
module CreateClassMixin
-
1
# Builds a converge task for the assembly instance and then expands any
# node-group subtasks into per-node member subtasks before returning it.
def create_from_assembly_instance(assembly,opts={})
  raw_task = Create.create_from_assembly_instance(assembly, opts)
  # decompose_node_groups! mutates the task in place and returns it
  NodeGroupProcessing.decompose_node_groups!(raw_task)
end
-
end
-
-
1
class Create
-
1
# Builds the full converge task for an assembly instance:
#  1) a top-level sequential task
#  2) optional power-on stage for stopped nodes (when opts[:start_node_changes])
#  3) optional create stage for nodes still in admin_op_status 'pending'
#  4) the config stages generated from the assembly's task template
# opts: :component_type (:service default, or :smoketest), :commit_msg,
#       :task_action, :start_node_changes, :ret_nodes (out-param collector).
# Raises ErrorUsage when no subtask ends up with any actions.
def self.create_from_assembly_instance(assembly,opts={})
  component_type = opts[:component_type]||:service
  target_idh = target_idh_from_assembly(assembly)
  task_mh = target_idh.create_childMH(:task)

  ret = create_top_level_task(task_mh,assembly,Aux.hash_subset(opts,[:commit_msg,:task_action]))
  assembly_nodes = assembly.get_leaf_nodes(:cols => [:id,:display_name,:type,:external_ref,:admin_op_status])

  # split nodes: 'pending' nodes must be created; all others get started
  start_nodes, create_nodes = [], []
  assembly_nodes.reject!{|n| n[:type].eql?('assembly_wide')}
  assembly_nodes.each do |a_node|
    if a_node[:admin_op_status].eql?('pending')
      create_nodes << a_node
    else
      start_nodes << a_node
    end
  end

  case component_type
  when :smoketest then nil # smoketest should not create a node
  when :service
    # start stopped nodes
    if start_node = opts[:start_node_changes]
      action_type = :power_on_node
      action_class = Action::PowerOnNode
      node_scs = StateChange::Assembly.node_state_changes(action_type,assembly,target_idh,{:just_leaf_nodes => true, :nodes => start_nodes})
      # :ret_nodes lets the caller collect the nodes actually being started
      if nodes = opts[:ret_nodes]
        node_scs.each{|sc|nodes << sc[:node]}
      end
      start_nodes_task = NodesTask.create_subtask(action_class,task_mh,node_scs)
    end
    # create nodes
    unless create_nodes.empty?
      action_type = :create_node
      action_class = Action::CreateNode
      node_scs = StateChange::Assembly.node_state_changes(action_type,assembly,target_idh,{:just_leaf_nodes => true, :nodes => create_nodes})
      create_nodes_task = NodesTask.create_subtask(action_class,task_mh,node_scs)
    end
  else
    raise Error.new("Unexpected component_type (#{component_type})")
  end

  opts_tt = opts.merge(:component_type_filter => component_type)
  task_template_content = Template::ConfigComponents.get_or_generate_template_content([:assembly,:node_centric],assembly,opts_tt)
  stages_config_nodes_task = task_template_content.create_subtask_instances(task_mh,assembly.id_handle())

  # note: start_nodes_task/create_nodes_task are nil-valued locals when their
  # branches did not run (Ruby defines them because the assignments were parsed)
  if start_nodes_task.nil? and create_nodes_task.nil? and stages_config_nodes_task.empty?
    raise ErrorUsage.new("There are no actions in the service instance")
  end

  # order: create new nodes, then power on stopped ones, then config stages
  ret.add_subtask(create_nodes_task) if create_nodes_task
  ret.add_subtask(start_nodes_task) if start_nodes_task
  ret.add_subtasks(stages_config_nodes_task) unless stages_config_nodes_task.empty?
  ret
end
-
-
#TODO: below will be private when finish refactoring this file
-
1
# Resolves the id handle of the target that owns this assembly instance.
def self.target_idh_from_assembly(assembly)
  owning_target = assembly.get_target()
  owning_target.id_handle()
end
-
1
# Thin wrapper around Task.create_stub so Create controls all task creation.
def self.create_new_task(task_mh,hash)
  Task.create_stub(task_mh,hash)
end
-
-
1
# Creates the top-level sequential task row for an assembly converge.
# opts: :task_action (overrides the default display name), :commit_msg.
def self.create_top_level_task(task_mh,assembly,opts={})
  fields = {
    :assembly_id => assembly.id,
    :display_name => opts[:task_action] || "assembly_converge",
    :temporal_order => "sequential"
  }
  commit_msg = opts[:commit_msg]
  fields[:commit_message] = commit_msg if commit_msg
  create_new_task(task_mh, fields)
end
-
-
1
# Task-tree builder for node-lifecycle stages (create / power-on).
class NodesTask < self
  # Builds a subtask for action_class from node state changes: a single task
  # when there is one change, otherwise a concurrent stage with one subtask
  # per change. Also computes attributes for all created actions.
  # Returns nil when state_change_list is nil or empty.
  def self.create_subtask(action_class,task_mh,state_change_list)
    return nil unless state_change_list and not state_change_list.empty?
    ret = nil
    all_actions = Array.new
    if state_change_list.size == 1
      executable_action = action_class.create_from_state_change(state_change_list.first)
      all_actions << executable_action
      ret = create_new_task(task_mh,subtask_hash(action_class,executable_action))
    else
      ret = create_new_task(task_mh,concurrent_subtask(action_class))
      state_change_list.each do |sc|
        executable_action = action_class.create_from_state_change(sc)
        all_actions << executable_action
        ret.add_subtask_from_hash(subtask_hash(action_class,executable_action))
      end
    end
    attr_mh = task_mh.createMH(:attribute)
    action_class.add_attributes!(attr_mh,all_actions)
    ret
  end

  private
  # NOTE(review): `private` does not affect `def self.` methods; the helpers
  # below remain publicly callable (private_class_method would be needed).

  # Hash form for the concurrent wrapper stage of a multi-node action.
  def self.concurrent_subtask(action_class)
    {
      :display_name => action_class.stage_display_name(),
      :temporal_order => "concurrent"
    }
  end

  # Hash form for a single executable-action subtask.
  def self.subtask_hash(action_class,executable_action)
    {
      :display_name => action_class.task_display_name(),
      :executable_action => executable_action
    }
  end
end
-
end
-
-
#TODO: move from below when decide whether needed; looking to generalize above so can subsume below
-
1
module CreateClassMixin
-
1
# Builds a concurrent "power_on_nodes" main task for the assembly and
# delegates subtask construction to create_running_node_task_from_assembly,
# which adds its subtasks directly onto opts[:main_task] and returns it.
def task_when_nodes_ready_from_assembly(assembly, component_type, opts)
  assembly_idh = assembly.id_handle()
  target_idh = target_idh_from_assembly(assembly)
  task_mh = target_idh.create_childMH(:task)

  main_task = create_new_task(task_mh,:assembly_id => assembly_idh.get_id(),:display_name => "power_on_nodes", :temporal_order => "concurrent",:commit_message => nil)
  # mutates the caller's opts so downstream helpers see the main task
  opts.merge!(:main_task => main_task)

  assembly_config_changes = StateChange::Assembly::component_state_changes(assembly,component_type)
  running_node_task = create_running_node_task_from_assembly(task_mh, assembly_config_changes, opts)
  # running_node_task = create_running_node_task(task_mh, assembly_config_changes)

  # main_task.add_subtask(running_node_task)

  running_node_task
end
-
-
#This is is the 'inventory node groups', not the node groups in the service instances'
-
1
# Builds a sequential converge task for an *inventory* node group (not a
# service-instance node group): an optional create-nodes stage followed by
# an optional config-nodes stage. Returns nil when neither stage has work.
def create_from_node_group(node_group_idh,commit_msg=nil)
  target_idh = node_group_idh.get_parent_id_handle_with_auth_info()
  task_mh = target_idh.create_childMH(:task)
  node_mh = target_idh.create_childMH(:node)
  node_group = node_group_idh.create_object()

  create_nodes_changes = StateChange::NodeCentric::SingleNodeGroup.node_state_changes(target_idh,:node_group => node_group)
  create_nodes_task = create_nodes_task(task_mh,create_nodes_changes)

  config_nodes_changes = StateChange::NodeCentric::SingleNodeGroup.component_state_changes(node_mh,:node_group => node_group)
  config_nodes_task = config_nodes_task(task_mh,config_nodes_changes)

  ret = create_new_task(task_mh,:temporal_order => "sequential",:node_id => node_group_idh.get_id(),:display_name => "node_group_converge", :commit_message => commit_msg)
  # cleanup: the original nested if/else recomputed `a||b` and left an
  # unused sub_task local; compacting the pair is equivalent and clearer
  subtasks = [create_nodes_task, config_nodes_task].compact
  return nil if subtasks.empty?
  subtasks.each{|subtask|ret.add_subtask(subtask)}
  ret
end
-
# TODO: might collapse these different creates for node, node_group, assembly
-
1
# Builds a sequential converge task for a single inventory node: an optional
# create-node stage followed by an optional config-node stage.
# Returns nil when neither stage has any work.
def create_from_node(node_idh,commit_msg=nil)
  target_idh = node_idh.get_parent_id_handle_with_auth_info()
  task_mh = target_idh.create_childMH(:task)
  node_mh = target_idh.create_childMH(:node)
  node = node_idh.create_object().update_object!(:display_name)

  create_nodes_changes = StateChange::NodeCentric::SingleNode.node_state_changes(target_idh,:node => node)
  create_nodes_task = create_nodes_task(task_mh,create_nodes_changes)

  # TODO: need to update this to :use_task_templates
  config_nodes_changes = StateChange::NodeCentric::SingleNode.component_state_changes(node_mh,:node => node)
  config_nodes_task = config_nodes_task(task_mh,config_nodes_changes)

  ret = create_new_task(task_mh,:temporal_order => "sequential",:node_id => node_idh.get_id(),:display_name => "node_converge", :commit_message => commit_msg)
  # cleanup: replaces the nested if/else that recomputed `a||b` and left an
  # unused sub_task local (same fix as create_from_node_group)
  subtasks = [create_nodes_task, config_nodes_task].compact
  return nil if subtasks.empty?
  subtasks.each{|subtask|ret.add_subtask(subtask)}
  ret
end
-
-
1
# Builds a sequential task that powers on a single stopped node.
# Returns nil when no power-on subtask can be built.
def power_on_from_node(node_idh,commit_msg=nil)
  ret = nil
  target_idh = node_idh.get_parent_id_handle_with_auth_info()
  task_mh = target_idh.create_childMH(:task)
  node_mh = target_idh.create_childMH(:node)
  node = node_idh.create_object().update_object!(:display_name)

  # NOTE(review): drives power-on from component_state_changes -- confirm
  # this is intended rather than node_state_changes
  power_on_nodes_changes = StateChange::NodeCentric::SingleNode.component_state_changes(node_mh,:node => node)
  power_on_nodes_task = create_running_node_task(task_mh,power_on_nodes_changes, :node => node)

  ret = create_new_task(task_mh,:temporal_order => "sequential",:node_id => node_idh.get_id(),:display_name => "node_converge", :commit_message => commit_msg)
  if power_on_nodes_task
    ret.add_subtask(power_on_nodes_task)
  else
    ret = nil
  end
  ret
end
-
-
# TODO: might deprecate
-
1
# Builds a sequential task from a flat list of pending state changes,
# grouped per node into a create-nodes stage and/or a config-nodes stage.
# State-change types without a supported task action are dropped with an
# error log entry.
def create_from_pending_changes(parent_idh,state_change_list)
  task_mh = parent_idh.create_childMH(:task)
  grouped_state_changes = group_by_node_and_type(state_change_list)
  # robustness: delete_if instead of deleting from the hash while iterating
  # it with each_key
  grouped_state_changes.delete_if do |type, _scs|
    supported = [Action::CreateNode,Action::ConfigNode].include?(type)
    Log.error("treatment of task action type #{type.to_s} not yet treated; it will be ignored") unless supported
    !supported
  end
  # if have both create_node and config node then top level has two stages create_node then config node
  create_nodes_task = create_nodes_task(task_mh,grouped_state_changes[Action::CreateNode])
  config_nodes_task = config_nodes_task(task_mh,grouped_state_changes[Action::ConfigNode])
  ret = create_new_task(task_mh,:temporal_order => "sequential")
  # cleanup: merged the duplicated branches; also avoids add_subtask(nil)
  # when both stage tasks are nil (the old else branch assumed one non-nil)
  [create_nodes_task, config_nodes_task].compact.each{|subtask|ret.add_subtask(subtask)}
  ret
end
-
-
1
# Public entry point for building an install-agents task; currently
# delegates to the stub implementation.
def create_install_agents_task(target, nodes,opts={})
  stub_create_install_agents_task(target, nodes, opts)
end
-
-
1
# Debug/stub builder for an "install_agents" task: a concurrent top-level
# task holding, for up to opts[:debug_num_nodes] (default 3) nodes popped
# from `nodes`, a sequential install_agent + execute_smoketest pair.
# NOTE: nodes are consumed with pop, so at most num_nodes entries are used.
def stub_create_install_agents_task(target, nodes, opts={})
  target_idh = target.id_handle()
  task_mh = target_idh.create_childMH(:task)
  # sample of the executable_action shape this stub was written against:
  # executable_action = {:node=>
  #  {
  #    :id=>2147626569,
  #    :display_name=>"imported_node_1",
  #    :group_id=>2147483732,
  #    :datacenter => target,
  #    :external_ref=>{
  #      "type"=>"physical",
  #      "routable_host_address"=>"ec2-54-227-229-14.compute-1.amazonaws.com",
  #      "ssh_credentials"=>{
  #        "ssh_user"=>"ubuntu",
  #        "ssh_password"=>"1ubuntu",
  #        "ssh_rsa_private_key"=>"PRIVATE_KEY",
  #        "sudo_password"=>"PASSWD"
  #      }
  #    }
  #  }#,
  #  # :state_change_types=>["install_agent"]
  # }

  main = create_new_task(task_mh, :executable_action_type => "InstallAgent", :target_id => target_idh.get_id(), :display_name => "install_agents", :temporal_order => "concurrent")
  num_nodes = (opts[:debug_num_nodes]||3).to_i

  # NOTE(review): subtasks is never used below
  subtasks = []
  (1..num_nodes).each do |num|
    if node = nodes.pop
      ret = create_new_task(task_mh, :executable_action_type => "InstallAgent", :target_id => target_idh.get_id(), :display_name => "install_agent", :temporal_order => "sequential")

      executable_action = Action::PhysicalNode.create_from_physical_nodes(target, node)
      subtask = create_new_task(task_mh, :executable_action_type => "InstallAgent", :executable_action => executable_action, :display_name => "install_agent_#{num.to_s}")#, :temporal_order => "sequential")
      ret.add_subtask(subtask)

      executable_action = Action::PhysicalNode.create_smoketest_from_physical_nodes(target, node)
      subtask = create_new_task(task_mh, :executable_action_type => "ExecuteSmoketest", :executable_action => executable_action, :display_name => "execute_smoketest_#{num.to_s}")#, :temporal_order => "sequential")
      ret.add_subtask(subtask)

      main.add_subtask(ret)
    end
  end

  main
end
-
-
1
private

# Instance-level delegate to Create's implementation so the mixin and the
# Create class resolve targets identically.
def target_idh_from_assembly(assembly)
  Create.target_idh_from_assembly(assembly)
end
-
-
1
# Builds the create-nodes stage from grouped state changes (each element is
# a per-node list with a single entry). A single change yields one task;
# several yield a concurrent "create_node_stage".
# Returns nil when there is nothing to do.
def create_nodes_task(task_mh,state_change_list)
  return nil unless state_change_list and not state_change_list.empty?
  # each element will be list with single element
  ret = nil
  all_actions = Array.new
  if state_change_list.size == 1
    executable_action = Action::CreateNode.create_from_state_change(state_change_list.first.first)
    all_actions << executable_action
    ret = create_new_task(task_mh,:executable_action => executable_action)
  else
    ret = create_new_task(task_mh,:display_name => "create_node_stage", :temporal_order => "concurrent")
    state_change_list.each do |sc|
      executable_action = Action::CreateNode.create_from_state_change(sc.first)
      all_actions << executable_action
      ret.add_subtask_from_hash(:executable_action => executable_action)
    end
  end
  attr_mh = task_mh.createMH(:attribute)
  Action::CreateNode.add_attributes!(attr_mh,all_actions)
  ret
end
-
-
1
# Adds power-on subtasks for an assembly's nodes onto opts[:main_task]:
# - no state changes: powers on the single node given in opts[:node]
# - otherwise: one power-on subtask per state change (restricted to
#   opts[:node] when given)
# - when opts[:nodes] contains nodes with no state changes, falls back to
#   plain per-node power-on actions for all of opts[:nodes]
# Returns the main task.
def create_running_node_task_from_assembly(task_mh,state_change_list,opts={})
  main_task = opts[:main_task]
  nodes = opts[:nodes]
  nodes_wo_components = []

  # for powering on node with no components
  unless state_change_list and not state_change_list.empty?
    unless node = opts[:node]
      raise Error.new("Expected that :node passed in as options")
    end

    executable_action = Action::PowerOnNode.create_from_node(node)
    attr_mh = task_mh.createMH(:attribute)
    Action::PowerOnNode.add_attributes!(attr_mh,[executable_action])
    ret = create_new_task(task_mh,:executable_action => executable_action, :display_name => "power_on_node")
    main_task.add_subtask(ret)

    return main_task
  end

  # collect the nodes that have no component state changes
  if nodes
    nodes_wo_components = nodes.dup
    state_change_list.each do |sc|
      if node = sc.first[:node]
        nodes_wo_components.delete_if{|n| n[:id] == node[:id]}
      end
    end
  end

  ret = nil
  all_actions = Array.new
  if nodes_wo_components.empty?
    # if assembly start called from node/node_id context,
    # do not start all nodes but one that command is executed from
    state_change_list = state_change_list.select{|s| s.first[:node][:id]==opts[:node][:id]} if opts[:node]

    # each element will be list with single element
    if state_change_list.size == 1
      executable_action = Action::PowerOnNode.create_from_state_change(state_change_list.first.first)
      all_actions << executable_action
      ret = create_new_task(task_mh,:executable_action => executable_action,:display_name => "power_on_node")
      main_task.add_subtask(ret)
    else
      # ret = create_new_task(task_mh,:display_name => "power_on_nodes", :temporal_order => "concurrent")
      state_change_list.each do |sc|
        executable_action = Action::PowerOnNode.create_from_state_change(sc.first)
        all_actions << executable_action
        main_task.add_subtask_from_hash(:executable_action => executable_action,:display_name => "power_on_node")
      end
    end
  else
    # NOTE(review): when any node lacks components, ALL nodes in opts[:nodes]
    # get plain power-on actions and the state changes are ignored here --
    # confirm this is intended
    nodes.each do |node|
      executable_action = Action::PowerOnNode.create_from_node(node)
      all_actions << executable_action
      ret = create_new_task(task_mh,:executable_action => executable_action, :display_name => "power_on_node")
      main_task.add_subtask(ret)
    end
  end
  attr_mh = task_mh.createMH(:attribute)
  Action::PowerOnNode.add_attributes!(attr_mh,all_actions)
  main_task
end
-
-
1
# Builds a standalone power-on task from grouped state changes; with no
# state changes it powers on opts[:node] directly (raising when absent).
# Unlike the _from_assembly variant, this returns the task instead of adding
# subtasks onto a main task.
def create_running_node_task(task_mh,state_change_list,opts={})
  # for powering on node with no components
  unless state_change_list and not state_change_list.empty?
    unless node = opts[:node]
      raise Error.new("Expected that :node passed in as options")
    end
    executable_action = Action::PowerOnNode.create_from_node(node)
    attr_mh = task_mh.createMH(:attribute)
    Action::PowerOnNode.add_attributes!(attr_mh,[executable_action])
    return create_new_task(task_mh,:executable_action => executable_action)
  end

  # each element will be list with single element
  ret = nil
  all_actions = Array.new
  if state_change_list.size == 1
    executable_action = Action::PowerOnNode.create_from_state_change(state_change_list.first.first)
    all_actions << executable_action
    ret = create_new_task(task_mh,:executable_action => executable_action)
  else
    # NOTE(review): stage name "create_node_stage" looks copy-pasted from
    # create_nodes_task -- probably should be a power-on stage name
    ret = create_new_task(task_mh,:display_name => "create_node_stage", :temporal_order => "concurrent")
    state_change_list.each do |sc|
      executable_action = Action::PowerOnNode.create_from_state_change(sc.first)
      all_actions << executable_action
      ret.add_subtask_from_hash(:executable_action => executable_action)
    end
  end
  attr_mh = task_mh.createMH(:attribute)
  Action::PowerOnNode.add_attributes!(attr_mh,all_actions)
  ret
end
-
-
# TODO: think asseumption is that each elemnt corresponds to changes to same node; if this is case may change input datastructure
-
# so node is not repeated for each element corresponding to same node
-
1
# Builds the concurrent "config_node_stage<stage_index>" task with one
# subtask per per-node state-change group. Per-node intra-node-cycle errors
# are collected and raised together as one ErrorUsage.
# Returns nil when there is nothing to configure.
def config_nodes_task(task_mh,state_change_list,assembly_idh=nil, stage_index=nil)
  return nil unless state_change_list and not state_change_list.empty?
  ret = nil
  all_actions = Array.new
  if state_change_list.size == 1
    executable_action, error_msg = get_executable_action_from_state_change(state_change_list.first, assembly_idh, stage_index)
    raise ErrorUsage.new(error_msg) unless executable_action
    all_actions << executable_action
    # even a single change is wrapped in a (one-subtask) concurrent stage
    ret = create_new_task(task_mh,:display_name => "config_node_stage#{stage_index}", :temporal_order => "concurrent")
    ret.add_subtask_from_hash(:executable_action => executable_action)
  else
    ret = create_new_task(task_mh,:display_name => "config_node_stage#{stage_index}", :temporal_order => "concurrent")
    all_errors = Array.new
    state_change_list.each do |sc|
      executable_action, error_msg = get_executable_action_from_state_change(sc, assembly_idh, stage_index)
      unless executable_action
        all_errors << error_msg
        next
      end
      all_actions << executable_action
      ret.add_subtask_from_hash(:executable_action => executable_action)
    end
    raise ErrorUsage.new("\n" + all_errors.join("\n")) unless all_errors.empty?
  end
  attr_mh = task_mh.createMH(:attribute)
  Action::ConfigNode.add_attributes!(attr_mh,all_actions)
  ret
end
-
-
# Amar
-
# moved call to ConfigNode.create_from_state_change into this method for error handling with clear message to user
-
# if TSort throws TSort::Cyclic error, it means intra-node cycle case
-
1
# Builds a ConfigNode action for one node's state-change group, translating
# a TSort::Cyclic (intra-node dependency cycle) into a user-facing message.
# Returns [executable_action, error_msg]; exactly one is non-nil.
def get_executable_action_from_state_change(state_change, assembly_idh, stage_index)
  executable_action = nil
  error_msg = nil
  begin
    executable_action = Action::ConfigNode.create_from_state_change(state_change, assembly_idh)
    executable_action.set_inter_node_stage!(stage_index)
  rescue TSort::Cyclic => e
    node = state_change.first[:node]
    display_name = node[:display_name]
    id = node[:id]
    # BUGFIX: guard the match -- the previous unconditional [1] raised
    # NoMethodError when the exception message had no bracketed id list
    match = e.message.match(/.*\[(.+)\]/)
    cycle_comp_ids = match ? match[1] : ''
    component_names = Array.new
    state_change.each do |cmp|
      component_names << "#{cmp[:component][:display_name]} (ID: #{cmp[:component][:id].to_s})" if cycle_comp_ids.include?(cmp[:component][:id].to_s)
    end
    error_msg = "Intra-node components cycle detected on node '#{display_name}' (ID: #{id}) for components: #{component_names.join(', ')}"
  end
  return executable_action, error_msg
end
-
-
1
# Partitions state changes into { task_action_class => [per-node change lists] }.
# Changes whose :type has no mapped task action are logged and dropped.
def group_by_node_and_type(state_change_list)
  by_type_and_node = Hash.new
  state_change_list.each do |state_change|
    action_class = map_state_change_to_task_action(state_change[:type])
    unless action_class
      Log.error("unexpected state change type encountered #{state_change[:type]}; ignoring")
      next
    end
    node_index = (by_type_and_node[action_class] ||= Hash.new)
    (node_index[state_change[:node][:id]] ||= Array.new) << state_change
  end
  # flatten the inner per-node index into an array of per-node lists
  by_type_and_node.each_with_object({}) { |(type, per_node), ret| ret[type] = per_node.values }
end
-
-
1
# Maps a state-change :type string to the Action class that executes it;
# returns nil for unsupported types. The mapping is memoized per instance
# and built lazily on first use.
def map_state_change_to_task_action(state_change)
  @mapping_sc_to_task_action ||=
    begin
      config_action = Action::ConfigNode
      {
        "create_node" => Action::CreateNode,
        "install_component" => config_action,
        "update_implementation" => config_action,
        "converge_component" => config_action,
        "setting" => config_action
      }
    end
  @mapping_sc_to_task_action[state_change]
end
-
-
1
# Delegates task-row creation to Create so all creation flows share one path.
def create_new_task(task_mh,hash)
  Create.create_new_task(task_mh,hash)
end
-
end
-
end; end
-
-
-
-
1
module DTK
  class Task < Model
    # Per-task helpers for tasks whose executable action targets a node group.
    module NodeGroupProcessingMixin
      # True when this task's executable action was produced by decomposing
      # a node group into its member nodes.
      def node_group_member?()
        (self[:executable_action]||{})[:node_group_member]
      end

      # Copies the execution-relevant fields (ExecuteActionFieldsToCopy) from
      # the parent task's executable action onto this member task's action.
      # Logs and returns self unchanged when either action is missing.
      def set_node_group_member_executable_action!(parent)
        ret = self
        unless ea = self[:executable_action]
          Log.error("Unexpected that self does not have field :executable_action")
          return ret
        end
        unless parent_ea = parent[:executable_action]
          Log.error("Unexpected that parent does not have field :executable_action")
          return ret
        end
        ExecuteActionFieldsToCopy.each{|field|ea[field] = parent_ea[field]}
        ret
      end
      ExecuteActionFieldsToCopy = [:component_actions,:state_change_types,:config_agent_type,:assembly_idh,:inter_node_stage]
    end

    module NodeGroupProcessing
      # replaces node groups with their member nodes (mutates task in place)
      def self.decompose_node_groups!(task)
        decompose!(task)
        task
      end
      private
      # NOTE(review): `private` has no effect on the `def self.` methods
      # below; they remain publicly callable.

      # Recursively walks the task tree, decomposing executable actions.
      def self.decompose!(task)
        case task.basic_type()
        when :executable_action
          decompose_executable_action!(task)
        when :decomposed_node_group
          #no op
        when :sequential
          task.subtasks.map{|st|decompose!(st)}
        when :concurrent
          task.subtasks.map{|st|decompose!(st)}
        else
          Log.error("do not have rules to process task")
        end
      end

      # Turns a node-group executable-action task into a concurrent task with
      # one member subtask per node in the group.
      def self.decompose_executable_action!(task)
        # noop if this is not a node group that decomposes
        ea = task[:executable_action]
        return unless ea.node_is_node_group?()

        #modify task so that it is a concurrent decomposed task
        task[:temporal_order] = "concurrent"
        ea[:decomposed_node_group] = true
        task[:subtasks] = ea.nodes.map{|node|node_group_member(node,task)}
      end

      # Builds the member subtask for one node of a decomposed node group.
      def self.node_group_member(node,parent_task)
        executable_action = parent_task[:executable_action].create_node_group_member(node)
        Task.create_stub(parent_task.model_handle(),:executable_action => executable_action)
      end
    end
  end
end
-
2
module DTK; class Task
  # Computes and renders hierarchical task indexes ("qualified indexes"):
  # each subtask's position path from the top-level task, e.g. [0, 2] -> "0.2".
  module QualifiedIndex
    Field = :qualified_index

    # Renders a task's qualified index as a dot-delimited string ('' when unset).
    def self.string_form(task)
      convert_to_string_form(task[Field])
    end

    # Destructively annotates subtask_indexes (see hash form below) with
    # Field, walking down from top_task. Returns {subtask_id => info}.
    def self.compute!(subtask_indexes,top_task)
      compute_recursive!(subtask_indexes,top_task.id() => {})
    end

    private

    # NOTE(review): `private` does not apply to `def self.` methods; the
    # helpers below remain publicly callable.
    def self.convert_to_string_form(qualified_index)
      return '' unless qualified_index
      qualified_index.map(&:to_s).join(LabelIndexDelimeter)
    end
    LabelIndexDelimeter = '.'

    # subtask_indexes hash form
    # {subtask_id => {:parent_id => ..., :index => ...}
    def self.compute_recursive!(subtask_indexes,parents)
      resolved = Hash.new
      subtask_indexes.each_pair do |subtask_id,info|
        parent = parents[info[:parent_id]]
        next unless parent
        entry = subtask_indexes.delete(subtask_id)
        entry[Field] = (parent[Field]||[]) + [entry[:index]]
        resolved[subtask_id] = entry
      end
      if resolved.empty? or subtask_indexes.empty?
        resolved
      else
        resolved.merge(compute_recursive!(subtask_indexes,resolved))
      end
    end
  end
end; end
-
1
module DTK
-
1
class Task
-
#TODO: need to clean up stage classes
-
1
r8_nested_require('stage','intra_node')
-
1
r8_nested_require('stage','puppet_stage_generator')
-
end
-
end
-
1
module XYZ
  module Stage
    # Orders one node's components into intra-node execution stages via a
    # Kahn-style topological peel: each stage holds the components whose
    # remaining dependencies have all been staged already. Raises ErrorUsage
    # when a dependency cycle prevents further progress.
    class IntraNode
      # component_dependencies: {component_id => [dependency component_ids]}
      # (mutated: staged entries are removed). state_change_list is used only
      # to build the cycle error message. Returns the ordered list of stages.
      def self.generate_stages(component_dependencies, state_change_list)
        return [component_dependencies] if component_dependencies.size == 1

        stages = Array.new
        remaining_count = component_dependencies.size
        loop do
          stage = generate_stage(component_dependencies)
          break if stage.empty?
          # raises when no component could be removed since the last pass
          remaining_count = detect_internode_cycle(component_dependencies, remaining_count, state_change_list)
          stages << stage
        end
        # extra check for when generate_stage is empty on the very first pass
        detect_internode_cycle(component_dependencies, remaining_count, state_change_list)

        # Amar TODO: save intranode stages
        stages
      end

      private

      # NOTE(review): `private` has no effect on the `def self.` methods below.

      # Extracts (and deletes from component_dependencies) every component
      # whose dependencies are no longer pending; returns them as the stage.
      def self.generate_stage(component_dependencies)
        stage = Hash.new
        pending = component_dependencies.keys
        component_dependencies.each do |comp, dep_comps|
          # ready when it has no dependencies, or none of them are still pending
          stage[comp] = dep_comps if dep_comps.empty? || (pending & dep_comps).empty?
        end
        stage.each_key { |comp| component_dependencies.delete(comp) }
        stage
      end

      # Raises ErrorUsage when no progress was made (component count unchanged
      # and non-zero); otherwise returns the current count.
      def self.detect_internode_cycle(component_dependencies, prev_deps_count, state_change_list)
        cur_deps_count = component_dependencies.size
        if prev_deps_count == cur_deps_count && prev_deps_count != 0
          # Gathering data for error's pretty print on CLI side
          stuck_ids = component_dependencies.keys
          node = state_change_list.first[:node]
          stuck_names = state_change_list.each_with_object(Array.new) do |sc, names|
            cmp = sc[:component]
            names << "#{cmp[:display_name]}(ID: #{cmp[:id]})" if stuck_ids.include?(cmp[:id])
          end
          raise ErrorUsage.new("Intra-node components cycle detected on node '#{node[:display_name]}' (ID: #{node[:id]}) for components: #{stuck_names.join(', ')}")
        end
        cur_deps_count
      end

    end
  end
end
-
1
module XYZ
  module Stage

    # Amar: generates the grouping that enables multiple puppet calls within
    # one puppet_agent execution. First implementation groups only node-group
    # components: components coming from node group members are bucketed per
    # member node, and the assembly-instance components form the final bucket.
    # Downstream, each bucket becomes an intra-node stage from which a puppet
    # manifest is generated.
    #
    # TODO: Future implementation should allow user to manipulate component grouping...
    class PuppetStageGenerator
      # component_dependencies: {component_id => deps}; state_change_list:
      # state changes carrying :node and :component info. A component belongs
      # to a node group member when its :node_node_id differs from the state
      # change's node id. Returns [grouped_dependencies, grouped_state_changes],
      # each ending with the assembly-instance bucket.
      def self.generate_stages(component_dependencies, state_change_list)
        group_deps = Hash.new
        group_scs = Hash.new
        assembly_deps = Hash.new
        assembly_scs = Array.new

        state_change_list.each do |sc|
          cmp_id = sc[:component][:id]
          member_node_id = sc[:component][:node_node_id]
          if sc[:node][:id] == member_node_id
            # component belongs directly to the assembly instance
            assembly_deps[cmp_id] = component_dependencies[cmp_id]
            assembly_scs << sc
          else
            # differing ids mean the component comes from a node group member
            (group_scs[member_node_id] ||= Array.new) << sc
            (group_deps[member_node_id] ||= Hash.new)[cmp_id] = component_dependencies[cmp_id]
          end
        end

        [group_deps.values + [assembly_deps], group_scs.values + [assembly_scs]]
      end
    end
  end
end
-
1
module DTK
-
1
class Task
-
1
# Query helpers for reporting task status at various scopes (assembly, node,
# node group, target).
class Status
  r8_nested_require('status','table_form')
  r8_nested_require('status','list_form')

  # Top-level tasks still marked "executing" that belong to an assembly or node.
  def self.get_active_top_level_tasks(model_handle)
    # TODO: need protection so dont get stale tasks that never came out of executing mode
    filter = [:and, [:eq,:status,"executing"],[:or,[:neq,:assembly_id,nil],[:neq,:node_id,nil]]]
    Task.get_top_level_tasks(model_handle,filter)
  end

  # Unique nodes referenced by executable actions anywhere in an active task tree.
  def self.find_nodes_that_are_active(model_handle)
    ret = Array.new
    model_handle = model_handle.createMH(:task)
    top_level_active = get_active_top_level_tasks(model_handle)
    return ret if top_level_active.empty?
    # TODO: way to make call Task.get_all_subtasks faster
    ndx_ret = Hash.new
    Task.get_all_subtasks(top_level_active.map{|t|t.id_handle}).each do |sub_task|
      if node = (sub_task[:executable_action] && sub_task[:executable_action][:node])
        ndx_ret[node[:id]] ||= node
      end
    end
    ndx_ret.values
  end

  private
  # NOTE(review): `private` does not affect `def self.` methods; get_status_aux
  # remains publicly callable.
  # Shared implementation behind the per-scope get_status entry points.
  # opts[:format] selects :table / :list / raw status-hash output.
  # Raises ErrorUsage when the scope object has no tasks.
  def self.get_status_aux(task_obj_idh,task_obj_type,filter,opts={})
    task_mh = task_obj_idh.createMH(:task)

    unless task = Task.get_top_level_most_recent_task(task_mh,filter)
      task_obj = task_obj_idh.create_object().update_object!(:display_name)
      raise ErrorUsage.new("No tasks found for #{task_obj_type} (#{task_obj[:display_name]})")
    end

    task_structure = Task.get_hierarchical_structure(task_mh.createIDH(:id => task[:id]))
    status_opts = Opts.new(:no_components => false, :no_attributes => true)
    status_opts.merge!(:summarize_node_groups => true) if (opts[:detail_level]||{})[:summarize_node_groups]
    case opts[:format]
    when :table
      TableForm.status(task_structure,status_opts)
    when :list
      ListForm.status(task_structure,task_obj_idh.createMH(:node))
    else
      task_structure.status(status_opts)
    end
  end

  # Assembly-scoped status queries.
  class Assembly < self
    def self.get_active_nodes(model_handle)
      find_nodes_that_are_active(model_handle)
    end

    def self.get_status(assembly_idh,opts={})
      filter = [:eq, :assembly_id, assembly_idh.get_id()]
      get_status_aux(assembly_idh, :assembly, filter, opts)
    end
  end

  # Node-scoped status queries.
  class Node < self
    def self.get_status(node_idh,opts={})
      filter = [:eq, :node_id, node_idh.get_id()]
      get_status_aux(node_idh,:node,filter,opts)
    end
  end

  # Node-group-scoped status queries.
  class NodeGroup < self
    # NOTE(review): filters on :node_id with the node group's id -- assumes
    # node groups share the node id space; confirm.
    def self.get_status(node_group_idh,opts={})
      filter = [:eq, :node_id, node_group_idh.get_id()]
      get_status_aux(node_group_idh,:node_group,filter,opts)
    end
  end

  # Target-scoped status queries.
  class Target < self
    def self.get_status(target_idh, opts={})
      filter = [:eq, :target_id, target_idh.get_id()]
      get_status_aux(target_idh, :target, filter, opts)
    end
  end

  # Hash subclass used to pass status-rendering options around.
  class Opts < Hash
    def initialize(hash_opts={})
      super()
      replace(hash_opts) unless hash_opts.empty?
    end
  end
end
-
-
1
module StatusMixin
-
# TODO: move to own file
-
1
def status_hash_form(opts,level=1)
-
set_and_return_types!()
-
ret = PrettyPrintHash.new
-
if level == 1
-
ret.add(self,:type,:id,:status,:commit_message?)
-
else
-
ret.add(self,:type,:status)
-
end
-
ret.add(self,:started_at?)
-
ret.add(self,:ended_at?)
-
num_subtasks = subtasks.size
-
ret.add(self,:temporal_order) if num_subtasks > 1
-
if num_subtasks > 0
-
ret.add(self,:subtasks) do |subtasks|
-
subtasks.sort{|a,b| (a[:position]||0) <=> (b[:position]||0)}.map{|st|st.status_hash_form(opts,level+1)}
-
end
-
end
-
case self[:executable_action_type]
-
when "ConfigNode"
-
if ea = self[:executable_action]
-
ret.merge!(Action::ConfigNode.status(ea,opts))
-
end
-
when "CreateNode"
-
if ea = self[:executable_action]
-
ret.merge!(Action::CreateNode.status(ea,opts))
-
end
-
end
-
errors = get_errors()
-
ret[:errors] = errors unless errors.empty?
-
ret
-
end
-
-
1
def hier_task_idhs()
-
[id_handle()] + subtasks.map{|r|r.hier_task_idhs()}.flatten
-
end
-
-
# TODO: probably better to set when creating
-
1
def set_and_return_types!()
-
type =
-
if self[:task_id].nil?
-
self[:display_name]||"commit_cfg_changes"
-
elsif action_type = self[:executable_action_type]
-
ActionTypeCodes[action_type.to_s]
-
elsif self[:display_name]
-
self[:display_name]
-
else
-
# TODO: probably deprecate below; it at least needs fixing up
-
# assumption that all subtypes some type
-
if sample_st = subtasks.first
-
if sample_st[:executable_action_type]
-
sample_type = ActionTypeCodes[sample_st[:executable_action_type]]
-
suffix = /config_node(\w.+)/.match(self[:display_name])[1] if sample_st[:executable_action_type] == "ConfigNode"
-
sample_type && "#{sample_type}s#{suffix}" #make plural
-
end
-
end
-
end
-
-
subtasks.each{|st|st.set_and_return_types!()}
-
self[:type] = type
-
end
-
-
1
ActionTypeCodes = {
-
"ConfigNode" => "configure_node",
-
"CreateNode" => "create_node"
-
}
-
-
1
def hier_task_idhs()
-
[id_handle()] + subtasks.map{|r|r.hier_task_idhs()}.flatten
-
end
-
1
protected :hier_task_idhs
-
-
# for debugging
-
1
def pretty_print_hash()
-
ret = PrettyPrintHash.new
-
ret.add(self,:id,:status)
-
num_subtasks = subtasks.size
-
# only include :temporal_order if more than 1 subtask
-
ret.add(self,:temporal_order) if num_subtasks > 1
-
if num_subtasks > 0
-
ret.add(self,:subtasks) do |subtasks|
-
subtasks.sort{|a,b| (a[:position]||0) <=> (b[:position]||0)}.map{|st|st.pretty_print_hash()}
-
end
-
end
-
action_type = self[:executable_action_type]
-
case action_type
-
when "ConfigNode"
-
ret.add(self,:executable_action_type)
-
ret.add(self,:executable_action?){|ea|Action::ConfigNode.pretty_print_hash(ea)}
-
when "CreateNode"
-
ret.add(self,:executable_action_type)
-
ret.add(self,:executable_action?){|ea|Action::CreateNode.pretty_print_hash(ea)}
-
else
-
ret.add(self,:executable_action_type?,:executable_action?)
-
end
-
ret
-
end
-
end
-
end
-
end
-
3
module DTK; class Task; class Status
-
1
module ListForm
-
# This method will return task details in form of list. It is used when CLI list-task-info is invoked
-
1
def self.status(task_structure,model_handle)
-
ret = Hash.new
-
-
ret[:task_id] = task_structure[:id]
-
ret[:task_name] = task_structure[:display_name]
-
ret[:temporal_order] = task_structure[:temporal_order]
-
ret[:actions] = Array.new
-
-
level_1 = task_structure[:subtasks]
-
level_1.each do |l1|
-
level_1_ret = Hash.new
-
level_1_ret[:temporal_order] = l1[:temporal_order]
-
level_1_ret[:task_name] = l1[:display_name]
-
level_2 = [l1]
-
level_2 = l1[:subtasks] if l1[:subtasks]
-
level_1_ret[:nodes] = Array.new
-
level_2.each do |l2|
-
level_2_ret = Hash.new
-
level_2_ret[:node_name] = l2[:executable_action][:node][:display_name]
-
if l2[:executable_action_type] == "CreateNode"
-
level_2_ret[:task_name] = "create_node"
-
level_2_ret[:node_id] = l2[:executable_action][:node][:id]
-
# Amar: Special case when 1 node present, to skip printing 'task1' on CLI for create_node_stage
-
level_1_ret[:task_name] = "create_node_stage" if l1[:subtasks].nil? && l1[:display_name].include?("task")
-
elsif l2[:executable_action_type] == "ConfigNode"
-
level_2_ret[:task_name] = "config_node"
-
level_2_ret[:components] = Array.new
-
level_3 = l2[:executable_action][:component_actions]
-
level_3.each do |l3|
-
# Amar: Following condition block checks if 'node_node_id' from component is identical to node's 'id'
-
# If two values are different, it means component came from node_group, and not from assembly instance
-
# Result is printing component source
-
# Check DTK-738 ticket for more details
-
source = "instance"
-
unless l3[:component][:node_node_id] == l2[:executable_action][:node][:id]
-
node_group = NodeGroup.id_to_name(model_handle, l3[:component][:node_node_id])
-
source = "node_group"
-
end
-
level_2_ret[:components] <<
-
{ :component =>
-
{
-
:component_name => l3[:component][:display_name],
-
:source => source,
-
:node_group => node_group
-
}
-
}
-
end
-
end
-
level_1_ret[:nodes] << level_2_ret
-
end
-
ret[:actions] << level_1_ret
-
end
-
ret
-
end
-
end
-
end; end; end
-
3
module DTK; class Task; class Status
-
1
module TableForm
-
1
DURATION_ACCURACY = 1
-
-
1
r8_nested_require('table_form','node_group_summary')
-
1
module Mixin
-
1
def status_table_form(opts)
-
TableForm.status_table_form_top(self,opts)
-
end
-
end
-
-
1
def self.status(task_structure,opts={})
-
task_structure.status_table_form(opts)
-
end
-
-
1
def self.status_table_form_top(task,opts)
-
status_table_form(task,opts)
-
end
-
-
1
private
-
1
def self.status_table_form(task,opts,level=1,ndx_errors=nil)
-
ret = Array.new
-
task.set_and_return_types!()
-
el = task.hash_subset(:started_at,:ended_at)
-
-
duration = el[:ended_at] - el[:started_at] if el[:ended_at] && el[:started_at]
-
el[:duration] = "#{duration.round(DURATION_ACCURACY)}s" if duration
-
-
el[:status] = task[:status] unless task[:status] == 'created'
-
el[:id] = task[:id]
-
# For ALdin 'type' needs to be computed depeidningon whether it is a create node, craeet component or action
-
# also can be different depending on whether it is a group
-
qualified_index = QualifiedIndex.string_form(task)
-
# for space after qualified index if not empty
-
qualified_index += ' ' unless qualified_index.empty?
-
type = element_type(task,level)
-
# putting idents in
-
el[:type] = "#{' '*(2*(level-1))}#{qualified_index}#{type}"
-
el[:index], el[:sub_index] = qualified_index.split('.').collect(&:to_i)
-
ndx_errors ||= task.get_ndx_errors()
-
if ndx_errors[task[:id]]
-
el[:errors] = format_errors(ndx_errors[task[:id]])
-
end
-
-
task_logs = task.get_logs()
-
if task_logs && task_logs[task[:id]]
-
el[:logs] = format_logs(task_logs[task[:id]])
-
end
-
-
ea = nil
-
if level == 1
-
# no op
-
else
-
ea = task[:executable_action]
-
case task[:executable_action_type]
-
when "ConfigNode"
-
el.merge!(Task::Action::ConfigNode.status(ea,opts)) if ea
-
when "CreateNode"
-
el.merge!(Task::Action::CreateNode.status(ea,opts)) if ea
-
when "PowerOnNode"
-
el.merge!(Task::Action::PowerOnNode.status(ea,opts)) if ea
-
when "InstallAgent"
-
el.merge!(Task::Action::InstallAgent.status(ea,opts)) if ea
-
when "ExecuteSmoketest"
-
el.merge!(Task::Action::ExecuteSmoketest.status(ea,opts)) if ea
-
end
-
end
-
ret << el
-
-
subtasks = task.subtasks()
-
num_subtasks = subtasks.size
-
if num_subtasks > 0
-
if opts[:summarize_node_groups] and (ea and ea[:node].is_node_group?())
-
NodeGroupSummary.new(subtasks).add_summary_info!(el) do
-
subtasks.map{|st|status_table_form(st,opts,level+1)}.flatten(1)
-
end
-
else
-
ret += subtasks.sort{|a,b| (a[:position]||0) <=> (b[:position]||0)}.map do |st|
-
status_table_form(st,opts,level+1,ndx_errors)
-
end.flatten(1)
-
end
-
end
-
ret
-
end
-
-
1
def self.format_errors(errors)
-
ret = nil
-
errors.each do |error|
-
if ret
-
ret[:message] << "\n\n"
-
else
-
ret = {:message => String.new}
-
end
-
-
if error.is_a? String
-
error,temp = {},error
-
error[:message] = temp
-
end
-
-
error_msg = (error[:component] ? "Component #{error[:component].gsub("__","::")}: " : "")
-
error_msg << (error[:message]||"error")
-
ret[:message] << error_msg
-
ret[:type] = error[:type]
-
end
-
ret
-
end
-
-
1
def self.format_logs(logs)
-
ret = nil
-
message = ''
-
-
logs.each do |log|
-
unless ret
-
ret = {:message => String.new}
-
end
-
-
if log.is_a? String
-
log,temp = {},log
-
log[:message] = temp
-
end
-
-
if message.empty?
-
message << ("To see more detail about specific task action use 'task-action-detail <TASK NUMBER>'\n")
-
ret[:message] << message
-
ret[:label] = log[:label]
-
ret[:type] = log[:type]
-
end
-
end
-
-
ret
-
end
-
-
1
def self.element_type(task,level)
-
if level == 1
-
task[:display_name]
-
elsif type = task[:type]
-
node = (task[:executable_action]||{})[:node]
-
config_agent = task.get_config_agent_type(nil, {:no_error_if_nil => true})
-
-
if config_agent == 'dtk_provider'
-
if node && node.is_node_group?()
-
type = 'nodegroup actions'
-
else
-
type = 'action'
-
end
-
end
-
-
if ['configure_node','create_node'].include?(type)
-
type = "#{type}group" if node && node.is_node_group?()
-
end
-
-
type
-
else
-
task[:display_name]|| "top"
-
end
-
end
-
end
-
end; end; end
-
2
module DTK; class Task::Status
-
1
module TableForm
-
1
class NodeGroupSummary
-
1
def initialize(subtasks)
-
@subtasks = subtasks
-
end
-
1
def add_summary_info!(ng_table_el,&block_for_subtasks)
-
@block_for_subtasks = block_for_subtasks
-
if status = ng_table_el[:status]
-
case status
-
when "succeeded"
-
ng_table_el[:status] = status_when_succeeded()
-
when "executing"
-
ng_table_el[:status] = status_when_executing()
-
when "cancelled"
-
# no op
-
when "failed"
-
ng_table_el[:status] = status_when_failed()
-
errors = summarize_errors?()
-
ng_table_el[:errors] = errors unless errors.empty?
-
else
-
Log.error("Unexpected status #{status}")
-
end
-
end
-
ng_table_el
-
end
-
-
1
private
-
1
def status_when_succeeded()
-
status_with_subtask_size("succeeded")
-
end
-
-
1
def status_when_executing()
-
status_when_aux("executing")
-
end
-
-
1
def status_when_failed()
-
status_when_aux("failed")
-
end
-
-
1
def status_when_aux(status)
-
st_status_count = subtask_status_count()
-
if st_status_count.empty?
-
status
-
else
-
st_status_count.inject("") do |st,(status,count)|
-
status_string = status_with_subtask_size(status,count)
-
st.empty? ? status_string : "#{st},#{status_string}"
-
end
-
end
-
end
-
-
1
def status_with_subtask_size(status,count=nil)
-
"#{status}(#{(count||subtask_count()).to_s})"
-
end
-
1
def subtask_count()
-
@subtasks.size
-
end
-
-
1
def subtask_status_rows()
-
@subtask_status_rows ||= (@block_for_subtasks && @block_for_subtasks.call())||[]
-
end
-
-
1
def subtask_status_count()
-
ret = Hash.new
-
subtask_status_rows().each do |subtask_table_el|
-
if status = subtask_table_el[:status]
-
ret[status] ||= 0
-
ret[status] += 1
-
end
-
end
-
ret
-
end
-
-
1
def summarize_errors?()
-
all_errors = Array.new
-
subtask_status_rows().each do |st|
-
if errors = st[:errors]
-
if errors.kind_of?(Array)
-
all_errors += errors
-
else
-
all_errors << errors
-
end
-
end
-
end
-
summarize_errors(all_errors) unless all_errors.empty?
-
end
-
-
1
def summarize_errors(errors)
-
# assuming all fields are the same except :message
-
msgs_found = Array.new
-
errors.each do |err|
-
msg = err[:message]
-
if msg and !msg.empty?
-
unless msgs_found.include?(msg)
-
msgs_found << msg
-
end
-
end
-
end
-
if msgs_found.empty?
-
errors.first
-
else
-
summary_msg = (msgs_found.size == 1 ? msgs_found.first : "\n#{ErrIdent}#{msgs_found.join(ErrIdent)}")
-
errors.first.merge(:message => summary_msg)
-
end
-
end
-
1
ErrIdent = ' '
-
end
-
end
-
end; end
-
2
module DTK; class Task
-
1
class Template < Model
-
-
1
module ActionType
-
1
Create = "__create_action"
-
end
-
-
1
module Serialization
-
1
module Field
-
1
Subtasks = :subtasks
-
1
TemporalOrder = :subtask_order
-
1
ExecutionBlocks = :exec_blocks
-
end
-
1
module Constant
-
1
module Variations
-
end
-
1
extend Aux::ParsingingHelper::ClassMixin
-
-
1
ComponentGroup = :Component_group
-
-
1
Concurrent = :concurrent
-
1
Sequential = :sequential
-
1
OrderedComponents = :ordered_components
-
1
Components = :components
-
-
## TODO: above are in old form
-
-
1
Actions = 'actions'
-
-
1
AllApplicable = 'All_applicable'
-
1
Variations::AllApplicable = ['All_applicable','All','All_Applicable','AllApplicable']
-
-
1
Node = 'node'
-
1
Variations::Node = ['node','node_group']
-
1
NodeGroup = 'node_group'
-
-
1
Nodes = 'nodes'
-
1
Variations::Nodes = ['nodes'] #TODO: dont think we need this because single variation
-
-
1
Subtasks = 'subtasks'
-
end
-
-
-
# TODO: if support ruby 1.8.7 need to make this fn of a hash class that perserves order
-
1
class OrderedHash < ::Hash
-
1
def initialize(initial_val=nil)
-
super()
-
replace(initial_val) if initial_val
-
end
-
end
-
end
-
-
1
r8_nested_require('template','parsing_error')
-
1
r8_nested_require('template','temporal_constraint')
-
1
r8_nested_require('template','temporal_constraints')
-
1
r8_nested_require('template','action')
-
1
r8_nested_require('template','action_list')
-
1
r8_nested_require('template','stage')
-
1
r8_nested_require('template','content')
-
1
r8_nested_require('template','config_components')
-
1
r8_nested_require('template','task_params')
-
-
1
def self.common_columns()
-
1
[:id,:group_id,:display_name,:task_action,:content]
-
end
-
-
1
def self.default_task_action()
-
ActionType::Create
-
end
-
-
1
def serialized_content_hash_form(opts={})
-
if hash_content = get_field?(:content)
-
self.class.serialized_content_hash_form(hash_content,opts)
-
end
-
end
-
-
1
def self.serialized_content_hash_form(hash,opts={})
-
ret = Serialization::OrderedHash.new(hash)
-
if task_params = opts[:task_params]
-
ret = TaskParams.bind_task_params(ret,task_params)
-
end
-
ret
-
end
-
-
# returns [ref,create_hash]
-
1
def self.ref_and_create_hash(serialized_content,task_action=nil)
-
task_action ||= default_task_action()
-
ref = ref(task_action)
-
create_hash = {
-
:task_action => task_action,
-
:content => serialized_content
-
}
-
[ref,create_hash]
-
end
-
-
1
private
-
1
def self.ref(task_action)
-
task_action||default_task_action()
-
end
-
-
1
def self.create_or_update_from_serialized_content?(assembly_idh,serialized_content,task_action=nil)
-
if task_template = get_matching_task_template?(assembly_idh,task_action)
-
task_template.update(:content => serialized_content)
-
task_template.id_handle()
-
else
-
task_action ||= default_task_action()
-
ref,create_hash = ref_and_create_hash(serialized_content,task_action)
-
create_hash.merge!(:ref => ref,:component_component_id => assembly_idh.get_id())
-
task_template_mh = assembly_idh.create_childMH(:task_template)
-
create_from_row(task_template_mh,create_hash,:convert=>true)
-
end
-
end
-
-
1
def self.delete_task_template?(assembly_idh,task_action=nil)
-
if task_template = get_matching_task_template?(assembly_idh,task_action)
-
task_template_idh = task_template.id_handle()
-
delete_instance(task_template_idh)
-
task_template_idh
-
end
-
end
-
-
1
def self.get_matching_task_template?(assembly_idh,task_action=nil)
-
task_action ||= default_task_action()
-
sp_hash = {
-
:cols => [:id],
-
:filter => [:and,[:eq,:component_component_id,assembly_idh.get_id],
-
[:eq,:task_action,task_action]]
-
}
-
task_template_mh = assembly_idh.createMH(:model_name => :task_template,:parent_model_name => :assembly)
-
get_obj(task_template_mh,sp_hash)
-
end
-
end
-
end; end
-
3
module DTK; class Task; class Template
-
1
class Action
-
1
r8_nested_require('action','component_action')
-
1
r8_nested_require('action','action_method')
-
1
r8_nested_require('action','with_method')
-
-
# opts can have keys
-
# :index
-
# :parent_action
-
1
attr_accessor :index
-
1
def initialize(opts={})
-
@index = opts[:index] || opts[:parent_action] && opts[:parent_action].index
-
end
-
1
private :initialize
-
-
# opts can have keys
-
# :method_name
-
# :index
-
# :parent_action
-
-
1
def self.create(object,opts={})
-
if object.kind_of?(Component)
-
add_action_method?(ComponentAction.new(object,opts),opts)
-
elsif object.kind_of?(Action)
-
add_action_method?(object,opts)
-
else
-
raise Error.new("Not yet implemented treatment of action of type {#{object.class.to_s})")
-
end
-
end
-
-
1
def self.find_action_in_list?(serialized_item,node_name,action_list,opts={})
-
# method_name could be nil
-
ret = nil
-
component_name_ref,method_name = WithMethod.parse(serialized_item)
-
unless action = action_list.find_matching_action(node_name,:component_name_ref => component_name_ref)
-
if opts[:skip_if_not_found]
-
return ret
-
else
-
raise ParsingError.new("The component reference '#{component_name_ref}' on node '#{node_name}' in the workflow is not in the assembly; either add it to the assembly or delete it from the workflow")
-
end
-
end
-
-
if cgn = opts[:component_group_num]
-
action = action.in_component_group(cgn)
-
end
-
-
return create(action) unless method_name
-
-
action_defs = action[:action_defs]||[]
-
if action_def = action_defs.find{|ad|ad.get_field?(:method_name) == method_name}
-
return create(action,:action_def => action_def)
-
end
-
-
unless opts[:skip_if_not_found]
-
err_msg = "The action method '#{method_name}' is not defined on component '#{component_name_ref}'"
-
if action_defs.empty?
-
err_msg << "; there are no actions defiend on this component."
-
else
-
legal_methods = action_defs.map{|ad|ad[:method_name]}
-
err_msg << "; legal method names are: #{legal_methods.join(',')}"
-
end
-
raise ParsingError.new(err_msg)
-
end
-
end
-
-
1
def method_missing(name,*args,&block)
-
@action.send(name,*args,&block)
-
end
-
1
def respond_to?(name)
-
@action.respond_to?(name) || super
-
end
-
-
1
def method_name?()
-
if action_method = action_method?
-
action_method.method_name()
-
end
-
end
-
# this can be overwritten
-
1
def action_method?()
-
nil
-
end
-
-
1
private
-
1
def self.add_action_method?(base_action,opts={})
-
opts[:action_def] ? base_action.class::WithMethod.new(base_action,opts[:action_def]) : base_action
-
end
-
end
-
end; end; end
-
3
module DTK; class Task; class Template
-
1
class Action
-
1
class ActionMethod < Hash
-
1
def initialize(action_def)
-
super()
-
hash = {
-
:method_name => action_def.get_field?(:method_name),
-
:action_def_id => action_def.id()
-
}
-
replace(hash)
-
end
-
-
1
def method_name()
-
self[:method_name]
-
end
-
-
1
def config_agent_type()
-
ConfigAgent::Type::Symbol.dtk_provider
-
end
-
end
-
end
-
end; end; end
-
3
module DTK; class Task; class Template
-
1
class Action
-
1
class ComponentAction < self
-
1
r8_nested_require('component_action','in_component_group')
-
1
include InComponentGroupMixin
-
-
1
def initialize(component,opts={})
-
unless component[:node].kind_of?(Node)
-
raise Error.new("ComponentAction.new must be given component argument with :node key")
-
end
-
super(opts)
-
@component = component
-
end
-
1
private :initialize
-
-
1
def method_missing(name,*args,&block)
-
@component.send(name,*args,&block)
-
end
-
1
def respond_to?(name)
-
@component.respond_to?(name) || super
-
end
-
-
1
def node()
-
@component[:node]
-
end
-
1
def node_id()
-
if node = node()
-
node.get_field?(:id)
-
end
-
end
-
1
def node_name()
-
if node = node()
-
node.get_field?(:display_name)
-
end
-
end
-
-
1
def match_action?(action)
-
action.kind_of?(self.class) and
-
node_name() == action.node_name and
-
component_type() == action.component_type()
-
end
-
-
1
def match?(node_name,component_name_ref=nil)
-
ret =
-
if node_name() == node_name
-
if component_name_ref.nil?
-
true
-
else
-
# strip off node_name prefix if it exists
-
# need to handle cases like apt::ppa[ppa:chris/node.js]
-
component_name_ref_x = component_name_ref.gsub(/^[^\[]+\//,'')
-
component_name_ref_x == serialization_form(:no_node_name_prefix => true)
-
end
-
end
-
!!ret
-
end
-
-
1
def match_component_ref?(component_type,title=nil)
-
component_type == component_type(:without_title=>true) and
-
(title.nil? or title == component_title?())
-
end
-
-
1
def serialization_form(opts={})
-
if filter = opts[:filter]
-
if filter.keys == [:source]
-
return nil unless filter[:source] == source_type()
-
else
-
raise Error.new("Not treating filter of form (#{filter.inspect})")
-
end
-
end
-
node_name = ((!opts[:no_node_name_prefix]) && component()[:node][:display_name])
-
component_type = component_type()
-
node_name ? "#{node_name}/#{component_type}" : component_type
-
end
-
-
1
def source_type()
-
ret = (@component[:source]||{})[:type]
-
ret && ret.to_sym
-
end
-
-
1
def assembly_idh?()
-
if source_type() == :assembly
-
@component[:source][:object].id_handle()
-
end
-
end
-
-
1
def component_type(opts={})
-
cmp_type = Component.component_type_print_form(@component.get_field?(:component_type))
-
unless opts[:without_title]
-
if title = component_title?()
-
cmp_type = ComponentTitle.print_form_with_title(cmp_type,title)
-
end
-
end
-
cmp_type
-
end
-
-
1
def component_title?()
-
@component[:title]
-
end
-
-
end
-
end
-
end; end; end
-
4
module DTK; class Task; class Template; class Action
-
1
class ComponentAction
-
1
module InComponentGroupMixin
-
1
def in_component_group(component_group_num)
-
InComponentGroup.new(component_group_num,@component,self)
-
end
-
# overwritten by InComponentGroup
-
1
def component_group_num()
-
nil
-
end
-
end
-
1
class InComponentGroup < self
-
1
attr_reader :component_group_num
-
1
def initialize(component_group_num,component,parent_action)
-
super(component,:parent_action => parent_action)
-
@component_group_num = component_group_num
-
end
-
end
-
end
-
end; end; end; end
-
3
module DTK; class Task; class Template
-
1
class Action
-
# This represents an action with am explicit method on it method
-
1
class WithMethod < self
-
1
def initialize(action,action_def)
-
@action = action
-
@method = ActionMethod.new(action_def)
-
end
-
1
def action_method?()
-
@method
-
end
-
-
1
def method_missing(name,*args,&block)
-
@action.send(name,*args,&block)
-
end
-
1
def respond_to?(name)
-
@action.respond_to?(name) || super
-
end
-
-
# returns [component_name_ref,method_name] where method_name can be nil
-
1
def self.parse(serialized_item)
-
unless serialized_item.kind_of?(String)
-
raise_action_ref_error(serialized_item)
-
end
-
if info = has_explicit_method?(serialized_item)
-
[info[:component_name_ref],info[:method_name]]
-
else
-
[serialized_item,nil]
-
end
-
end
-
-
1
private
-
# returns hash with keys :component_name_ref,:method_name
-
# if has explicit method otherwise returns nil
-
# explicit form is
-
# component.method_name, or
-
# component[title].method_name
-
# complication is that title can have a '.' in it
-
1
def self.has_explicit_method?(serialized_item)
-
# case on whether has title
-
if serialized_item =~ /(^[^\[]+)\[([^\]]+)\](.*$)/
-
cmp_with_title = "#{$1}[#{$2}]"
-
dot_method = $3
-
if dot_method.empty?
-
nil
-
elsif dot_method =~ /^\.(.+$)/
-
method = $1
-
{:component_name_ref => cmp_with_title,:method_name => method}
-
else
-
raise_action_ref_error(serialized_item)
-
end
-
else
-
# no title
-
split = serialized_item.split('.')
-
case split.size
-
when 1
-
nil
-
when 2
-
{:component_name_ref => split[0], :method_name => split[1]}
-
else
-
raise_action_ref_error(serialized_item)
-
end
-
end
-
end
-
-
1
def self.raise_action_ref_error(serialized_item)
-
raise ParsingError.new("The action reference (#{serialized_item.inspect}) is ill-formed")
-
end
-
end
-
end
-
end; end; end
-
3
module DTK; class Task; class Template
-
1
class ActionList < ::Array
-
1
r8_nested_require('action_list','config_components')
-
-
1
def initialize(action_list=nil)
-
super()
-
@action_index = Hash.new
-
if action_list
-
action_list.each do |a|
-
unless i = a.index
-
raise Error.new("An action list passed into ActionList.new must have actions with set indexes")
-
end
-
@action_index[i] = a
-
self << a
-
end
-
end
-
end
-
-
# sets @action_index on self and sets index on action if not set already
-
# assumes that no indexs on action set or all or them, cannot be a mixture of these two cases
-
1
def set_action_indexes!()
-
each_with_index do |a,i|
-
a.index ||= i
-
@action_index[a.index] = a
-
end
-
self
-
end
-
-
1
def index(i)
-
@action_index[i]
-
end
-
-
1
def find_matching_node_id(node_name)
-
# there can be multiple matches, but first match is fine since they will all agree on node_id
-
if match = find_matching_action(node_name)
-
unless node_id = match.node_id()
-
Log.error("Unexpected that node id is nil for node name (#{node_name})")
-
end
-
node_id
-
end
-
end
-
-
1
def find_matching_action(node_name,opts={})
-
find{|a|a.match?(node_name,opts[:component_name_ref])}
-
end
-
-
1
def select(&block)
-
ret = self.class.new()
-
each{|el|ret << el if block.call(el)}
-
ret.set_action_indexes!()
-
end
-
-
1
def <<(el)
-
super(el.kind_of?(Action) ? el : Action.create(el))
-
end
-
-
end
-
end; end; end
-
3
module DTK; class Task; class Template
-
1
class ActionList
-
1
class ConfigComponents < self
-
1
def self.get(assembly,opts={})
-
# component_list_filter_proc includes clause to make sure no target refs
-
opts_assembly_cmps = {:seed => new(),:filter_proc => component_list_filter_proc(opts)}
-
assembly_cmps = assembly.get_component_info_for_action_list(opts_assembly_cmps)
-
# NodeGroup.get_component_info_for_action_list looks for any components in inventory node groups
-
ret = NodeGroup.get_component_info_for_action_list(assembly_cmps.nodes(),:add_on_to => assembly_cmps)
-
ret.set_action_indexes!()
-
end
-
end
-
-
1
def nodes()
-
ndx_ret = Hash.new
-
each do |r|
-
node = r[:node]
-
ndx_ret[node[:id]] ||= node
-
end
-
ndx_ret.values()
-
end
-
-
1
private
-
1
def self.component_list_filter_proc(opts={})
-
if cmp_type_filter = opts[:component_type_filter]
-
lambda{|el|(el[:node].nil? or !el[:node].is_target_ref?) and (el[:nested_component]||{})[:basic_type] == cmp_type_filter.to_s}
-
else
-
lambda{|el|el[:node].nil? or !el[:node].is_target_ref?}
-
end
-
end
-
end
-
end; end; end
-
-
-
2
module DTK; class Task
-
1
class Template
-
1
class ConfigComponents < self
-
1
r8_nested_require('config_components','persistence')
-
-
1
def self.update_when_added_component?(assembly,node,new_component,component_title,opts={})
-
# only updating the create action task template and only if it is persisted
-
assembly_cmp_actions = ActionList::ConfigComponents.get(assembly)
-
if task_template_content = get_template_content_aux?([:assembly],assembly,assembly_cmp_actions,nil,opts)
-
new_action = Action.create(new_component.merge(:node => node,:title => component_title))
-
gen_constraints_proc = proc{TemporalConstraints::ConfigComponents.get(assembly,assembly_cmp_actions)}
-
if updated_template_content = task_template_content.insert_action?(new_action,assembly_cmp_actions,gen_constraints_proc)
-
Persistence::AssemblyActions.persist(assembly,updated_template_content)
-
end
-
end
-
end
-
-
1
def self.update_when_deleted_component?(assembly,node,component)
-
# TODO: currently only updating the create action task template and only if it is persisted
-
# makes sense to also automtically delete component in other actions
-
assembly_cmp_actions = ActionList::ConfigComponents.get(assembly)
-
if task_template_content = get_template_content_aux?([:assembly],assembly,assembly_cmp_actions)
-
action_to_delete = Action.create(component.add_title_field?().merge(:node => node))
-
if updated_template_content = task_template_content.delete_explicit_action?(action_to_delete,assembly_cmp_actions)
-
Persistence::AssemblyActions.persist(assembly,updated_template_content)
-
end
-
end
-
end
-
-
# TODO: do more accurate parse if assembly is non null
-
1
def self.find_parse_errors(hash_content,assembly=nil)
-
begin
-
cmp_actions = (assembly && ActionList::ConfigComponents.get(assembly))
-
serialized_content = serialized_content_hash_form(Aux.convert_keys_to_symbols_recursive(hash_content))
-
Content.parse_and_reify(serialized_content,cmp_actions)
-
rescue ParsingError => parse_error
-
return parse_error
-
end
-
nil
-
end
-
-
# action_types is scalar or array with elements
-
# :assembly
-
# :node_centric
-
1
def self.get_or_generate_template_content(action_types,assembly,opts={})
-
action_types = Array(action_types)
-
raise_error_if_unsupported_action_types(action_types)
-
-
task_action = opts[:task_action]
-
opts_action_list = Aux.hash_subset(opts,[:component_type_filter])
-
cmp_actions = ActionList::ConfigComponents.get(assembly,opts_action_list)
-
-
# first see if there is a persistent serialized task template for assembly instance and that it should be used
-
opts_get_template = Aux.hash_subset(opts,[:task_params])
-
if template_content = get_template_content_aux?(action_types,assembly,cmp_actions,task_action,opts_get_template)
-
return template_content
-
end
-
-
# otherwise do the temporal processing to generate template_content
-
opts_generate = (node_centric_first_stage?() ? {:node_centric_first_stage => true} : Hash.new)
-
template_content = generate_from_temporal_contraints([:assembly,:node_centric],assembly,cmp_actions,opts_generate)
-
-
unless opts[:serialized_form]
-
# persist assembly action part of what is generated
-
Persistence::AssemblyActions.persist(assembly,template_content,task_action)
-
end
-
-
template_content
-
end
-
-
1
private
-
1
def self.raise_error_if_unsupported_action_types(action_types)
-
unless action_types.include?(:assembly)
-
raise Error.new("Not supported when action types does not contain :assembly")
-
end
-
illegal_action_types = (action_types - [:assembly,:node_centric])
-
unless illegal_action_types.empty?
-
raise Error.new("Illegal action type(s) (#{illegal_action_types.join(',')})")
-
end
-
end
-
1
def self.node_centric_first_stage?()
-
true
-
end
-
-
1
def self.get_template_content_aux?(action_types,assembly,cmp_actions,task_action=nil,opts={})
-
if assembly_action_content = Persistence::AssemblyActions.get_content_for(assembly,cmp_actions,task_action,opts)
-
if action_types == [:assembly]
-
assembly_action_content
-
else #action_types has both and assembly and node_centric
-
node_centric_content = generate_from_temporal_contraints(:node_centric,assembly,cmp_actions)
-
if node_centric_content.empty?
-
assembly_action_content
-
else
-
opts_splice = (node_centric_first_stage?() ? {:node_centric_first_stage => true} : Hash.new)
-
assembly_action_content.splice_in_at_beginning!(node_centric_content,opts_splice)
-
end
-
end
-
end
-
end
-
-
1
def self.generate_from_temporal_contraints(action_types,assembly,cmp_actions,opts={})
-
action_types = Array(action_types)
-
relevant_actions =
-
if action_types == [:assembly]
-
cmp_actions.select{|a|a.source_type() == :assembly}
-
elsif action_types == [:node_centric]
-
cmp_actions.select{|a|a.source_type() == :node_group}
-
else #action_types consists of :assembly and :node_centric
-
cmp_actions
-
end
-
temporal_constraints = TemporalConstraints::ConfigComponents.get(assembly,relevant_actions)
-
Content.new(temporal_constraints,relevant_actions,opts)
-
end
-
-
end
-
end
-
end; end
-
# methods used to maintain the peristence of an assembly instance task template
-
# The content can be both node centeric and assembly actions; the class Persistence is responsible for both
-
# and class AssemblyActions is responsible for just the assembly actions
-
4
module DTK; class Task; class Template; class ConfigComponents
  class Persistence
    # Persists and retrieves the assembly-action portion of a task template,
    # with an (currently stubbed) in-memory cache of reified content.
    class AssemblyActions
      # Returns reified template content for the assembly's (optionally named)
      # workflow action, or nil when none exists and no explicit task_action
      # was requested.
      # Raises ErrorUsage when an explicitly named task_action cannot be found.
      def self.get_content_for(assembly,cmp_actions,task_action=nil,opts={})
        # if task_params given cant use ReifiedObjectCache because params can differ from call to call
        unless opts[:serialized_form] or opts[:task_params]
          if ret = ReifiedObjectCache.get(assembly,task_action)
            return ret
          end
        end

        if serialized_content = get_serialized_content_from_assembly(assembly,task_action,opts)
          if opts[:serialized_form]
            Content.reify(serialized_content)
          else
            Content.parse_and_reify(serialized_content,cmp_actions,opts)
          end
        else
          # raise error if explicit task_action is given and cant be found
          if task_action
            raise ErrorUsage.new("The Workflow action '#{task_action}' does not exist")
          end
        end
      end

      # Serializes template_content and writes it to the assembly; when there
      # is nothing to serialize, deletes any existing task template instead.
      # Keeps the ReifiedObjectCache in sync in both branches.
      def self.persist(assembly,template_content,task_action=nil)
        if serialized_content = template_content.serialization_form(:allow_empty_task => true,:filter => {:source => :assembly})
          task_template_idh = Template.create_or_update_from_serialized_content?(assembly.id_handle(),serialized_content,task_action)
          ReifiedObjectCache.add_or_update_item(task_template_idh,template_content)
        else
          # nothing to persist: remove any existing template and its cache entry
          if task_template_idh = Template.delete_task_template?(assembly.id_handle(),task_action)
            ReifiedObjectCache.remove_item(task_template_idh)
          end
        end
      end

      # Evicts cache entries made stale by the given assembly update.
      def self.remove_any_outdated_items(assembly_update)
        ReifiedObjectCache.remove_any_outdated_items(assembly_update)
      end

      private

      # NOTE(review): `private` does not affect `def self.` methods in Ruby;
      # this method is effectively public (kept as-is to preserve behavior).
      # Returns the serialized hash form of the assembly's task template, or
      # nil/false when the assembly has none.
      def self.get_serialized_content_from_assembly(assembly,task_action=nil,opts={})
        ret = assembly.get_task_template(task_action)
        ret && ret.serialized_content_hash_form(opts)
      end

      # In-memory cache of reified template content, keyed by task template id.
      class ReifiedObjectCache
        # using task_template_id as cache key
        @@cache = Hash.new

        ### TODO: these are in no-op mode until implemented
        def self.get(assembly,task_action=nil)
          # TODO: stub; nothing in cache
          nil
        end
        def self.add_or_update_item(task_template_idh,content)
          #@@cache[key(task_template_idh)] = content
        end

        def self.remove_item(task_template_idh)
          #@@cache.delete(key(task_template_idh))
        end
        ### TODO: end: these are in no-op mode until implemented

        def self.remove_any_outdated_items(assembly_update)
          find_impacted_template_idhs(assembly_update).each{|idh|delete_item?(idh)}
        end

        private

        def self.delete_item?(task_template_idh)
          key = key(task_template_idh)
          # BUG FIX: was `@@cache.hash_key?(key)` -- Hash has no #hash_key?
          # method, so every eviction raised NoMethodError.
          if @@cache.has_key?(key)
            @@cache.delete(key)
          end
        end

        def self.key(task_template_idh)
          task_template_idh.get_id()
        end

        def self.find_impacted_template_idhs(assembly_update)
          ret = Array.new
          all_templates = assembly_update.assembly_instance().get_task_templates()
          # BUG FIX: was `return ret if all.empty?` -- `all` is an undefined
          # local variable; the guard clearly refers to all_templates.
          return ret if all_templates.empty?

          all_templates.select{|tt|should_be_removed?(tt,assembly_update)}.map{|tt|tt.id_handle()}
        end

        def self.should_be_removed?(task_template,assembly_update)
          # TODO: stub: conservatively treat every template as outdated
          true
        end
      end
    end
  end
end; end; end; end
-
2
module DTK; class Task
  class Template
    # Content is an ordered Array of inter-node stages making up a task
    # template. It can be built from temporal constraints or from serialized
    # (persisted) form, supports inserting/deleting individual actions, and
    # can serialize itself back to hash form.
    class Content < Array
      r8_nested_require('content','insert_action_helper')
      r8_nested_require('content','action_match')

      include Serialization
      include Stage::InterNode::Factory::StageName

      # object - a TemporalConstraints or a SerializedContentArray (see create_stages!)
      # actions - the component actions the stages are built from
      def initialize(object,actions,opts={})
        super()
        create_stages!(object,actions,opts)
      end

      # Creates one stub Task per inter-node stage (each stage runs its node
      # subtasks concurrently) and attaches attributes for all generated
      # actions. Returns the array of stage-level stub tasks.
      def create_subtask_instances(task_mh,assembly_idh)
        ret = Array.new
        return ret if empty?()
        all_actions = Array.new
        each_internode_stage do |internode_stage,stage_index|
          task_hash = {
            # fall back to a generated stage name when the stage is unnamed
            :display_name => internode_stage.name || DefaultNameProc.call(stage_index,size == 1),
            :temporal_order => "concurrent"
          }
          internode_stage_task = Task.create_stub(task_mh,task_hash)
          all_actions += internode_stage.add_subtasks!(internode_stage_task,stage_index,assembly_idh)
          ret << internode_stage_task
        end
        attr_mh = task_mh.createMH(:attribute)
        Task::Action::ConfigNode.add_attributes!(attr_mh,all_actions)
        ret
      end

      # if action is not included in task template than insert the action in this object and return updated object
      # else return nil
      def insert_action?(new_action,action_list,gen_constraints_proc)
        insert_action_helper = InsertActionHelper.create(new_action,action_list,gen_constraints_proc)
        insert_action_helper.insert_action?(self)
      end

      # if action is explicitly included in task template then delete the action from this object and return updated object
      # else return nil
      # Actions inside multi-node stages are never deleted here.
      def delete_explicit_action?(action,action_list)
        if indexed_action = action_list.find{|a|a.match_action?(action)}
          if action_match = includes_action?(indexed_action)
            unless action_match.in_multinode_stage
              delete_action!(action_match)
              self
            end
          end
        end
      end

      # Inserts action_match.insert_action at the position described by
      # insert_point (a symbol); raises Error on an unknown insert_point.
      def splice_in_action!(action_match,insert_point)
        case insert_point
        when :before_internode_stage
          if action_match.internode_stage_index == 1
            # no earlier stage exists: create a brand-new first stage
            new_internode_stage = Stage::InterNode.create_from_single_action(action_match.insert_action)
            insert(action_match.internode_stage_index-1,new_internode_stage)
          else
            internode_stage(action_match.internode_stage_index).splice_in_action!(action_match,:end_last_execution_block)
          end
        when :before_action_pos
          internode_stage(action_match.internode_stage_index).splice_in_action!(action_match,:before_action_pos)
        # TODO: currently this cannot be reached because using :add_as_new_last_internode_stage instead
        # if leave below in make it so under more cases a new stage is created
        when :end_last_internode_stage
          last_internode_stage = internode_stage(:last)
          # create new stage if last_internode_stage is
          #  - multi node, or
          #  - has explicit actions
          if last_internode_stage.kind_of?(Stage::InterNode::MultiNode) or
              last_internode_stage.has_action_with_method?()
            new_internode_stage = Stage::InterNode.create_from_single_action(action_match.insert_action)
            self << new_internode_stage
          else
            last_internode_stage.splice_in_action!(action_match,:end_last_execution_block)
          end
        when :add_as_new_last_internode_stage
          new_internode_stage = Stage::InterNode.create_from_single_action(action_match.insert_action)
          self << new_internode_stage
        else raise Error.new("Unexpected insert_point (#{insert_point})")
        end
      end
      # TODO: have above subsume below
      # Prepends template_content: either as new leading stage(s)
      # (:node_centric_first_stage) or merged into the current first stage
      # (in which case template_content must have exactly one stage).
      def splice_in_at_beginning!(template_content,opts={})
        if opts[:node_centric_first_stage]
          insert(0,*template_content)
        else
          unless template_content.size == 1
            raise ErrorUsage.new("Can only splice in template content that has a single inter node stage")
          end
          first.splice_in_at_beginning!(template_content.first)
        end
        self
      end

      # Serializes all stages. Returns nil for an empty task when
      # opts[:allow_empty_task]; otherwise raises. Single-stage content is
      # flattened (no sequential wrapper, stage name dropped).
      def serialization_form(opts={})
        ret = nil
        subtasks = map{|internode_stage|internode_stage.serialization_form(opts)}.compact
        if subtasks.empty?()
          if opts[:allow_empty_task]
            return ret
          else
            raise ErrorUsage.new("The task has no actions")
          end
        end
        # Dont put in sequential block if just single stage
        if subtasks.size == 1
          subtasks.first.delete(:name)
          subtasks.first
        else
          {
            Field::TemporalOrder => Constant::Sequential,
            Field::Subtasks => subtasks
          }
        end
      end

      # Wraps already-serialized content without parsing it.
      def self.reify(serialized_content)
        RawForm.new(serialized_content)
      end

      # Thin holder that returns its serialized content unchanged.
      class RawForm
        def serialization_form(opts={})
          @serialized_content
        end
        def initialize(serialized_content)
          @serialized_content = serialized_content
        end
      end

      # Parses serialized content into a Content instance.
      def self.parse_and_reify(serialized_content,actions,opts={})
        # normalize to handle case where single stage; test for single stage is whether serialized_content[Field::TemporalOrder] == Constant::Sequential
        temporal_order = serialized_content[Field::TemporalOrder]
        has_multi_internode_stages = (temporal_order and (temporal_order.to_sym == Constant::Sequential))
        subtasks = serialized_content[Field::Subtasks]
        normalized_subtasks =
          if subtasks
            has_multi_internode_stages ? subtasks : [{Field::Subtasks => subtasks}]
          else
            [serialized_content]
          end
        new(SerializedContentArray.new(normalized_subtasks),actions,opts)
      end

      # Marker class so create_stages! can distinguish serialized input from
      # temporal-constraint input.
      class SerializedContentArray < Array
        def initialize(array)
          super()
          array.each{|a|self << a}
        end
      end

      # Yields each stage with its 1-based index.
      def each_internode_stage(&block)
        each_with_index{|internode_stage,i|block.call(internode_stage,i+1)}
      end

      def add_ndx_action_index!(hash,action)
        self.class.add_ndx_action_index!(hash,action)
      end
      # Appends action.index under hash[action.node_id]; returns hash.
      def self.add_ndx_action_index!(hash,action)
        (hash[action.node_id] ||= Array.new) << action.index
        hash
      end

      # Returns an ActionMatch locating the first stage that contains the
      # given action, or nil when not present.
      def includes_action?(action)
        ndx_action_indexes = add_ndx_action_index!(Hash.new,action)
        return nil if ndx_action_indexes.empty?()
        each_internode_stage do |internode_stage,stage_index|
          action_match = ActionMatch.new(action)
          if internode_stage.find_earliest_match?(action_match,ndx_action_indexes)
            action_match.internode_stage_index = stage_index
            return action_match
          end
        end
        nil
      end

      private

      # Deletes the matched action; drops the stage when it becomes empty and
      # returns :empty when the whole content becomes empty.
      def delete_action!(action_match)
        internode_stage_index = action_match.internode_stage_index
        if :empty == internode_stage(internode_stage_index).delete_action!(action_match)
          delete_internode_stage!(internode_stage_index)
          :empty if empty?()
        end
      end

      # Fetches a stage by 1-based index, or the last one for :last.
      def internode_stage(internode_stage_index)
        if internode_stage_index == :last
          last()
        else
          self[internode_stage_index-1]
        end
      end

      def delete_internode_stage!(internode_stage_index)
        delete_at(internode_stage_index-1)
      end

      # Dispatches stage construction on the input type.
      def create_stages!(object,actions,opts={})
        if object.kind_of?(TemporalConstraints)
          create_stages_from_temporal_constraints!(object,actions,opts)
        elsif object.kind_of?(SerializedContentArray)
          create_stages_from_serialized_content!(object,actions,opts)
        else
          raise Error.new("create_stages! does not treat argument of type (#{object.class})")
        end
      end

      def create_stages_from_serialized_content!(serialized_content_array,actions,opts={})
        serialized_content_array.each do |a|
          if stage = Stage::InterNode.parse_and_reify?(a,actions,opts)
            self << stage
          end
        end
      end

      # With :node_centric_first_stage, node-group actions are staged first
      # (with node-group stage names), then assembly actions; otherwise all
      # actions are staged together.
      def create_stages_from_temporal_constraints!(temporal_constraints,actions,opts={})
        default_stage_name_proc = {:internode_stage_name_proc => DefaultNameProc}
        if opts[:node_centric_first_stage]
          node_centric_actions = actions.select{|a|a.source_type() == :node_group}
          # TODO: get :internode_stage_name_proc from node group field :task_template_stage_name
          opts_x = {:internode_stage_name_proc => DefaultNodeGroupNameProc}.merge(opts)
          create_stages_from_temporal_constraints_aux!(temporal_constraints, node_centric_actions,opts_x)

          assembly_actions = actions.select{|a|a.source_type() == :assembly}
          create_stages_from_temporal_constraints_aux!(temporal_constraints,assembly_actions,default_stage_name_proc.merge(opts))
        else
          create_stages_from_temporal_constraints_aux!(temporal_constraints,actions,default_stage_name_proc.merge(opts))
        end
      end

      # Topologically orders actions into stages: each iteration pulls the
      # actions that are not "after" any remaining action; a non-empty
      # constraint set that yields no such actions indicates a cycle.
      def create_stages_from_temporal_constraints_aux!(temporal_constraints,actions,opts={})
        return if actions.empty?
        inter_node_constraints = temporal_constraints.select{|tc|tc.inter_node?()}

        stage_factory = Stage::InterNode::Factory.new(actions,temporal_constraints)
        before_index_hash = inter_node_constraints.create_before_index_hash(actions)
        done = false
        # NOTE(review): existing_num_stages is assigned but never used below
        existing_num_stages = size()
        new_stages = Array.new
        # before_index_hash gets destroyed in while loop
        while not done do
          if before_index_hash.empty?
            done = true
          else
            stage_action_indexes = before_index_hash.ret_and_remove_actions_not_after_any!()
            if stage_action_indexes.empty?()
              # TODO: see if any other way there can be loops
              raise ErrorUsage.new("Loop detected in temporal orders")
            end
            internode_stage = stage_factory.create(stage_action_indexes)
            self << internode_stage
            new_stages << internode_stage
          end
        end
        set_internode_stage_names!(new_stages,opts[:internode_stage_name_proc])
        self
      end

      # Assigns generated names (1-based index) to any newly created stages
      # that do not already have one.
      def set_internode_stage_names!(new_stages,internode_stage_name_proc)
        return unless internode_stage_name_proc
        is_single_stage = (new_stages.size() == 1)
        new_stages.each_with_index do |internode_stage,i|
          unless internode_stage.name
            stage_index = i+1
            internode_stage.name = internode_stage_name_proc.call(stage_index,is_single_stage)
          end
        end
      end

    end
  end
end; end
-
3
module DTK; class Task; class Template
  class Content
    # Value object recording where an action matched (or should be inserted)
    # within template content: the stage, execution block, and position, plus
    # whether the match landed in a multi-node stage.
    class ActionMatch
      attr_accessor :insert_action, :action, :internode_stage_index,
                    :execution_block_index, :action_position, :in_multinode_stage

      def initialize(insert_action = nil)
        @insert_action = insert_action
        # the remaining attributes describe the match and start out unset
        @action = nil
        @in_multinode_stage = nil
        @internode_stage_index = nil
        @execution_block_index = nil
        @action_position = nil
      end

      # node of the matched action; nil when no match has been recorded
      def node_id
        @action && @action.node_id
      end

      # true once a concrete matching action has been recorded
      def match_found?
        !@action.nil?
      end
    end
  end
end; end; end
-
-
3
module DTK; class Task; class Template
  class Content
    # Strategy object that decides where a new action should be spliced into
    # template content, based on the temporal constraints relating it to
    # actions already present.
    class InsertActionHelper
      r8_nested_require('insert_action_helper','insert_at_end')

      # Factory: picks the strategy subclass (default InsertAtEnd) and builds it.
      def self.create(new_action,action_list,gen_constraints_proc,insert_strategy=nil)
        insert_strategy_class(insert_strategy).new(new_action,action_list,gen_constraints_proc)
      end

      # Inserts the action unless it is already present; returns nil when the
      # action is already included (insert_action! is defined by the strategy
      # subclass, e.g. InsertAtEnd).
      def insert_action?(template_content)
        unless template_content.includes_action?(@new_action)
          compute_before_after_relations!()
          insert_action!(template_content)
        end
      end

      private
      def initialize(new_action,action_list,gen_constraints_proc,insert_strategy=nil)
        # resolve the incoming action against the indexed action list
        # NOTE(review): find may return nil if no entry matches -- verify
        # callers always pass an action present in action_list
        @new_action = action_list.find{|a|a.match_action?(new_action)}
        @new_action_node_id = new_action.node_id
        @gen_constraints_proc = gen_constraints_proc
        @ndx_action_indexes = NdxActionIndexes.new()
      end

      class NdxActionIndexes < Hash
        # These are of form
        # [:internode|:samenode][:before|:after]
        # which has value {node_id => [action_indexes],,,}
        def get(inter_or_same,before_or_after)
          (self[inter_or_same]||{})[before_or_after]||{}
        end
        def add(inter_or_same,before_or_after,action)
          pntr = ((self[inter_or_same] ||= Hash.new)[before_or_after] ||= Hash.new)
          Content.add_ndx_action_index!(pntr,action)
          self
        end
      end

      # Maps an optional strategy symbol to its class; defaults to InsertAtEnd.
      def self.insert_strategy_class(insert_strategy=nil)
        # default insert strategy is to put the new action in the latest existing internode stage at the latest point
        if insert_strategy
          unless ret = InsertStrategies[insert_strategy]
            raise Error.new("Illegal insert action strategy (#{insert_strategy})")
          end
          ret
        else
          InsertAtEnd
        end
      end

      InsertStrategies = {
        :insert_at_end => InsertAtEnd
      }

      # Walks the generated temporal constraints and records, for the new
      # action, which existing actions must come before/after it, split by
      # whether they live on the same node or a different one.
      def compute_before_after_relations!()
        unless new_action_index = @new_action.index
          # if @new_action does not have an index it means that it is not in action list
          Log.error("Cannot find action in action list; using no constraints")
          return
        end

        temporal_constraints = @gen_constraints_proc.call()
        return if temporal_constraints.empty?

        temporal_constraints.each do |tc|
          if tc.before_action_index == new_action_index
            # new action must come before tc.after_action
            after_action = tc.after_action
            if after_action.node_id == @new_action_node_id
              add_ndx_action_index(:samenode,:after,after_action)
            else
              add_ndx_action_index(:internode,:after,after_action)
            end
          elsif tc.after_action_index == new_action_index
            # new action must come after tc.before_action
            before_action = tc.before_action
            if before_action.node_id == @new_action_node_id
              add_ndx_action_index(:samenode,:before,before_action)
            else
              add_ndx_action_index(:internode,:before,before_action)
            end
          end
        end
      end

      def get_ndx_action_indexes(inter_or_same,before_or_after)
        @ndx_action_indexes.get(inter_or_same,before_or_after)
      end
      def add_ndx_action_index(inter_or_same,before_or_after,action)
        @ndx_action_indexes.add(inter_or_same,before_or_after,action)
      end

    end
  end
end;end;end
-
3
module DTK; class Task; class Template
  class Content
    class InsertActionHelper
      # Default strategy: place the new action as late as possible -- before
      # any stage/position that must come after it, otherwise as a brand-new
      # last inter-node stage.
      class InsertAtEnd < self
        def insert_action!(template_content)
          # scan stages in order; the first "after" relation found fixes the
          # insertion point
          template_content.each_internode_stage do |stage, stage_index|
            # :internode relation => insert before the whole stage;
            # :samenode relation => insert before the matched position
            { :internode => :before_internode_stage,
              :samenode  => :before_action_pos }.each do |relation, insert_point|
              if match = find_earliest_match?(stage, stage_index, relation, :after)
                template_content.splice_in_action!(match, insert_point)
                return template_content
              end
            end
          end
          # no constraining match anywhere: append as a new last stage
          fallback_match = ActionMatch.new(@new_action)
          # TODO: was using
          #  template_content.splice_in_action!(fallback_match,:end_last_internode_stage)
          # but switched to below because it would group together actions in same stage that probably should be separate stages
          template_content.splice_in_action!(fallback_match, :add_as_new_last_internode_stage)
          template_content
        end
      end

      # Returns an ActionMatch (with its stage index filled in) for the
      # earliest action in the stage related to @new_action by the given
      # relation, or nil when there is none.
      def find_earliest_match?(internode_stage, stage_index, inter_or_same, before_or_after)
        related_indexes = get_ndx_action_indexes(inter_or_same, before_or_after)
        return nil if related_indexes.empty?
        candidate = ActionMatch.new(@new_action)
        return nil unless internode_stage.find_earliest_match?(candidate, related_indexes)
        candidate.internode_stage_index = stage_index
        candidate
      end
    end
  end
end;end;end
-
2
module DTK; class Task
  class Template
    # Error raised when parsing a serialized workflow (task template) fails;
    # prefixes every message with ErrorPrefix and records caller info.
    class ParsingError < ErrorUsage::Parsing
      ErrorPrefix = 'Workflow parsing error'

      def initialize(msg, *extra_args)
        full_args = Params.add_opts(extra_args, :error_prefix => ErrorPrefix, :caller_info => true)
        super(msg, *full_args)
      end
    end
  end
end; end
-
-
-
2
module DTK; class Task
  class Template
    # Namespace holder for task-template stages; concrete implementations are
    # pulled in from nested files.
    class Stage
      # inter_node: ordering of actions across nodes
      # intra_node: ordering of actions within a single node
      r8_nested_require('stage','inter_node')
      r8_nested_require('stage','intra_node')
    end
  end
end; end
-
3
module DTK; class Task; class Template
  class Stage
    # An inter-node stage: a Hash mapping node_id => intra-node execution
    # blocks, plus an optional stage name. Knows how to parse/serialize
    # itself and to splice/delete individual actions.
    class InterNode < Hash
      r8_nested_require('inter_node','factory')
      r8_nested_require('inter_node','multi_node')
      include Serialization

      def initialize(name=nil)
        super()
        @name = name
      end
      attr_accessor :name

      # Builds a one-action stage named after the action's component type.
      def self.create_from_single_action(action)
        new(stage_name(action)).add_new_execution_block_for_action!(action)
      end

      def self.stage_name(action)
        "component #{action.component_type()}"
      end
      private_class_method :stage_name

      # returns all actions generated
      def add_subtasks!(parent_task,internode_stage_index,assembly_idh=nil)
        ret = Array.new
        each_node_actions do |node_actions|
          if action = node_actions.add_subtask!(parent_task,internode_stage_index,assembly_idh)
            ret << action
          end
        end
        ret
      end

      # Searches this stage's node actions for the earliest action whose index
      # appears in ndx_action_indexes ({node_id => [indexes]}); fills in
      # action_match and returns true on success.
      def find_earliest_match?(action_match,ndx_action_indexes)
        ndx_action_indexes.each_pair do |node_id,action_indexes|
          if node_actions = self[node_id]
            if node_actions.find_earliest_match?(action_match,action_indexes)
              action_match.in_multinode_stage = true if kind_of?(MultiNode)
              return true
            end
          end
        end
        false
      end

      # true when any node in this stage has an action with an explicit method
      def has_action_with_method?()
        !!values.find{|node_actions|node_actions.has_action_with_method?()}
      end

      # Deletes the matched action; removes the node entry when it becomes
      # empty and returns :empty when the whole stage becomes empty.
      def delete_action!(action_match)
        node_id = action_match.action.node_id
        unless node_action = self[node_id]
          raise Error.new("Unexepected that no node action can be found")
        end
        if :empty == node_action.delete_action!(action_match)
          delete(node_id)
          :empty if empty?()
        end
      end

      # Splices action_match.insert_action into this stage at insert_point
      # (:end_last_execution_block or :before_action_pos).
      def splice_in_action!(action_match,insert_point)
        unless node_id = action_match.insert_action.node_id
          raise Error.new("Unexepected that node_id is nil")
        end
        case insert_point
        when :end_last_execution_block
          if node_action = self[node_id]
            node_action.splice_in_action!(action_match,insert_point)
          else
            # node not yet in this stage: start a new execution block for it
            add_new_execution_block_for_action!(action_match.insert_action)
          end
        when :before_action_pos
          unless node_action = self[node_id]
            raise Error.new("Illegal node_id (#{action_match.node_id})")
          end
          node_action.splice_in_action!(action_match,insert_point)
        else raise Error.new("Unexpected insert_point (#{insert_point})")
        end
      end
      # TODO: have above subsume below
      # Merges another stage's node actions in front of this stage's: shared
      # nodes get the incoming actions prepended; new nodes are added.
      def splice_in_at_beginning!(internode_stage)
        ndx_splice_in_node_ids = internode_stage.node_ids().inject(Hash.new){|h,node_id|h.merge(node_id => true)}
        each_node_id do |node_id|
          if matching = internode_stage[node_id]
            self[node_id].splice_in_at_beginning!(matching)
            ndx_splice_in_node_ids.delete(node_id)
          end
        end
        # any node ids not already present are added wholesale
        ndx_splice_in_node_ids.keys.each do |node_id|
          merge!(node_id => internode_stage[node_id])
        end
        self
      end

      # Serializes this stage; returns nil when no node produced content.
      def serialization_form(opts={})
        subtasks = map_node_actions{|node_actions|node_actions.serialization_form(opts)}.compact
        return nil if subtasks.empty?

        ret = serialized_form_with_name()

        # Dont put in concurrent block if there is just one node
        if subtasks.size == 1
          ret.merge(subtasks.first)
        else
          ret.merge(Field::TemporalOrder => Constant::Concurrent, Field::Subtasks => subtasks)
        end
      end
      # action_list nil can be passed if just concerned with parsing
      def self.parse_and_reify?(serialized_content,action_list,opts={})
        # content could be either
        # 1) a concurrent block with multiple nodes,
        # 2) a single node,
        # 3) a multi-node specification
        if multi_node_type = parse_and_reify_is_multi_node_type?(serialized_content)
          return MultiNode.parse_and_reify(multi_node_type,serialized_content,action_list)
        end

        normalized_content = serialized_content[Field::Subtasks]||[serialized_content]
        ret = normalized_content.inject(new(serialized_content[:name])) do |h,serialized_node_actions|
          unless node_name = Constant.matches?(serialized_node_actions,:Node)
            if Constant.matches?(serialized_node_actions,:Nodes)
              raise ParsingError.new("Within nested subtask only '#{Constant::Node}' and not '#{Constant::Nodes}' keyword can be used")
            end
            raise ParsingError.new("Missing node reference in: ?1",serialized_node_actions)
          end
          node_id = 0 #dummy value when just used for parsing
          if action_list
            unless node_id = action_list.find_matching_node_id(node_name)
              raise ParsingError.new("The following element(s) cannot be resolved with respect to the assembly's nodes and components: ?1",serialized_content)
            end
          end
          node_actions = parse_and_reify_node_actions?(serialized_node_actions,node_name,node_id,action_list,opts)
          # NOTE(review): when node_actions is nil this resets the accumulator
          # to a plain {} (losing prior node actions and the stage name);
          # `: h` may have been intended -- verify against callers
          node_actions ? h.merge(node_actions) : {}
        end
        !ret.empty? && ret
      end

      def add_new_execution_block_for_action!(action)
        # leveraging Stage::IntraNode::ExecutionBlocks.parse_and_reify(node_actions,node_name,action_list) for this
        node_actions = {Constant::OrderedComponents => [action.component_type()]}
        node_name = action.node_name()
        action_list = ActionList.new([action])
        merge!(action.node_id => Stage::IntraNode::ExecutionBlocks.parse_and_reify(node_actions,node_name,action_list))
      end

      def each_node_id(&block)
        each_key{|node_id|block.call(node_id)}
      end
      def node_ids()
        keys()
      end

      private
      # OrderedHash seeded with this stage's name, if any
      def serialized_form_with_name()
        @name ? OrderedHash.new(:name => @name) : OrderedHash.new
      end

      # Returns the multi-node type when serialized_content describes a
      # multi-node stage; only applies when there is no Subtasks key.
      def self.parse_and_reify_is_multi_node_type?(serialized_content)
        # only look at leaf subtasks tasks
        unless leaf_subtask?(serialized_content)
          if ret = Constant.matches?(serialized_content,:Nodes)
            ret
          elsif !Constant.matches?(serialized_content,:Node)
            # neither Nodes nor Node given: treat as all-applicable multi-node
            Constant::AllApplicable
          end
        end
      end

      def self.leaf_subtask?(serialized_content)
        Constant.matches?(serialized_content,:Subtasks)
      end

      # Parses one node's actions into execution blocks; returns
      # {node_id => exec_blocks} or nil when everything parsed empty.
      def self.parse_and_reify_node_actions?(node_actions,node_name,node_id,action_list,opts={})
        exec_blocks = Stage::IntraNode::ExecutionBlocks.parse_and_reify(node_actions,node_name,action_list,opts)
        # remove empty blocks
        exec_blocks.reject!{|exec_block|exec_block.empty?}
        unless exec_blocks.empty?
          {node_id => exec_blocks}
        end
      end

      def each_node_actions(&block)
        each_value{|node_actions|block.call(node_actions)}
      end

      def map_node_actions(&block)
        values.map{|node_actions|block.call(node_actions)}
      end
    end
  end
end; end; end
-
-
4
module DTK; class Task; class Template; class Stage
  class InterNode
    # Builds InterNode stages from sets of action indexes, ordering each
    # node's actions via the intra-node constraint processor.
    class Factory
      def initialize(action_list, temporal_constraints)
        @action_list = action_list
        @temporal_constraints = temporal_constraints
      end

      # Creates a stage containing the actions at stage_action_indexes:
      # first bucket them per node (unordered), then order each bucket.
      def create(stage_action_indexes, name = nil)
        # first break each stage into an unordered list per node
        stage = InterNode.new(name)
        stage_action_indexes.each do |action_index|
          action = @action_list.index(action_index)
          bucket = (stage[action.node_id] ||= IntraNode::Unordered.new)
          bucket << action
        end

        processor = Stage::IntraNode::Processor.new(@temporal_constraints)
        stage.each_node_id do |node_id|
          stage[node_id] = processor.process(stage[node_id])
        end
        stage
      end

      # Default stage-name generators; the stage index suffix is only added
      # when there is more than one stage.
      module StageName
        DefaultNameProc = lambda do |stage_index, single_stage|
          base = "configure_nodes"
          single_stage ? base : "#{base}_stage_#{stage_index}"
        end

        DefaultNodeGroupNameProc = lambda do |stage_index, single_stage|
          base = "config_node_group_components"
          single_stage ? base : "#{base}_stage_#{stage_index}"
        end
      end
    end
  end
end; end; end; end
-
-
-
4
module DTK; class Task; class Template; class Stage
  class InterNode
    # A stage whose components/actions apply across multiple nodes rather
    # than being listed per node.
    class MultiNode < self
      # Raises ParsingError unless the serialized form has one of the
      # component/action fields listed in ComponantOrActionConstants.
      def initialize(serialized_multinode_action)
        super(serialized_multinode_action[:name])
        unless @ordered_components = components_or_actions(serialized_multinode_action)
          all_legal = Constant.all_string_variations(*ComponantOrActionConstants).join(',')
          msg = "Missing Component or Action field (#{all_legal})"
          if name = serialized_multinode_action[:name]
            msg << " in stage '#{name}'"
          end
          raise ParsingError.new(msg)
        end
      end

      # Serializes in compact multi-node form unless explicit per-node
      # instances (:explicit_instances) are requested.
      def serialization_form(opts={})
        if opts[:form] == :explicit_instances
          super
        else
          serialized_form_with_name().merge(:nodes => serialized_multi_node_type(),Constant::OrderedComponents => @ordered_components)
        end
      end

      # Picks the concrete MultiNode subclass for multi_node_type and reifies
      # it against action_list (which may be nil for pure parsing).
      def self.parse_and_reify(multi_node_type,serialized_multinode_action,action_list)
        klass(multi_node_type).new(serialized_multinode_action).parse_and_reify!(action_list)
      end

      private
      # NOTE(review): constant name carries a historical "Componant" typo;
      # kept since it is referenced by name within this class
      ComponantOrActionConstants = [:OrderedComponents,:Components,:Actions]
      # Returns the value of the first matching component/action field, or nil.
      def components_or_actions(serialized_el)
        if match = ComponantOrActionConstants.find{|k|Constant.matches?(serialized_el,k)}
          Constant.matches?(serialized_el,match)
        end
      end

      def self.klass(multi_node_type)
        if Constant.matches?(multi_node_type,:AllApplicable)
          Applicable
        else
          raise ParsingError.new("Illegal multi node type (#{multi_node_type}); #{Constant.its_legal_values(:AllApplicable)}")
        end
      end

      # This is used to include all applicable classes
      class Applicable < self
        # action_list can be nil for just parsing
        # For each serialized action, finds all actions in action_list whose
        # component ref matches (optionally "type[title]") and groups the
        # serialized actions per node, then reifies each node's actions.
        def parse_and_reify!(action_list)
          ret = self
          return ret unless action_list
          info_per_node = Hash.new #indexed by node_id
          @ordered_components.each do |serialized_action|
            cmp_ref,method_name = Action::WithMethod.parse(serialized_action)
            cmp_type,cmp_title = [cmp_ref,nil]
            if cmp_ref =~ CmpRefWithTitleRegexp
              # split "type[title]" into its parts
              cmp_type,cmp_title = [$1,$2]
            end
            matching_actions = action_list.select{|a|a.match_component_ref?(cmp_type,cmp_title)}
            matching_actions.each do |a|
              node_id = a.node_id
              pntr = info_per_node[node_id] ||= {:actions => Array.new, :name => a.node_name, :id => node_id}
              pntr[:actions] << serialized_action
            end
          end
          info_per_node.each_value do |n|
            if node_actions = InterNode.parse_and_reify_node_actions?({Constant::OrderedComponents => n[:actions]},n[:name],n[:id],action_list)
              merge!(node_actions)
            end
          end
          ret
        end
        # matches e.g. "cmp_type[title]" with the type and title captured
        CmpRefWithTitleRegexp = /(^[^\[]+)\[([^\]]+)\]$/

        def serialized_multi_node_type()
          "All_applicable"
        end

      end
    end
  end
end; end; end; end
-
-
-
3
module DTK; class Task; class Template
  class Stage
    # Intra-node machinery: turns an unordered pile of a node's actions into
    # ordered execution blocks.
    class IntraNode
      r8_nested_require('intra_node','execution_block')
      r8_nested_require('intra_node','execution_blocks')

      # Applies intra-node temporal constraints to an Unordered action set.
      class Processor
        def initialize(temporal_constraints)
          @intra_node_constraints = temporal_constraints.select { |constraint| constraint.intra_node?() }
        end

        # first break unordered node into execution blocks
        # then order each execution block
        # TODO: right now just ordering within each execution block; want to expand to look for global inconsistencies
        def process(intra_node_unordered)
          blocks = intra_node_unordered.break_into_execution_blocks()
          blocks.order_each_block(@intra_node_constraints)
        end
      end

      # although in an array, order does not make a difference
      class Unordered < Array
        # Buckets the actions into execution blocks keyed by the source-type
        # ordering in ExecBlockOrder; blocks come out in ascending key order.
        def break_into_execution_blocks()
          grouped = group_by { |action| execution_block_index(action) }
          blocks = ExecutionBlocks.new
          grouped.keys.sort.each do |block_index|
            block = ExecutionBlock::Unordered.new
            grouped[block_index].each { |action| block << action }
            blocks << block
          end
          blocks
        end

        private

        # Maps an action's source type to its execution-block slot; raises
        # Error for a missing or unrecognized source type.
        def execution_block_index(action)
          source_type = action.source_type
          unless source_type
            raise Error.new("Cannot find source type for action (#{action.inspect})")
          end
          block_index = ExecBlockOrder[source_type]
          unless block_index
            raise Error.new("Not yet implemented, finding execution block order for action with source of type (#{source_type})")
          end
          block_index
        end

        # node-level actions run in the first block, assembly actions after
        ExecBlockOrder = {
          :node_group => 0,
          :node => 0,
          :assembly => 1
        }
      end
    end
  end
end; end; end
-
3
module DTK; class Task; class Template
-
2
class Stage; class IntraNode
-
1
class ExecutionBlock < Array
-
1
include Serialization
-
1
def node()
-
# all the elements have same node so can just pick first
-
first && first[:node]
-
end
-
-
1
def components()
-
map{|action|component(action)}
-
end
-
1
def component(action)
-
action.hash_subset(*Component::Instance.component_list_fields)
-
end
-
1
private :component
-
-
# opts can be
-
# :group_nums
-
# :action_methods
-
1
def components_hash_with(opts={})
-
map do |action|
-
cmp_hash = {:component => component(action)}
-
if opts[:group_nums]
-
cmp_hash.merge!(:component_group_num => action.component_group_num)
-
end
-
if opts[:action_methods]
-
if action_method = action.action_method?()
-
cmp_hash.merge!(:action_method => action_method)
-
end
-
end
-
cmp_hash
-
end
-
end
-
-
1
def find_earliest_match?(action_match,action_indexes)
-
each_action_with_position do |a,pos|
-
if action_indexes.include?(a.index)
-
action_match.action = a
-
action_match.action_position = pos
-
return true
-
end
-
end
-
false
-
end
-
-
1
def has_action_with_method?()
-
!!find{|a|a.kind_of?(Action::WithMethod)}
-
end
-
1
def all_actions_with_method?()
-
!find{|a|!(a.kind_of?(Action::WithMethod))}
-
end
-
-
1
def delete_action!(action_match)
-
delete_at(action_match.action_position()-1)
-
:empty if empty?()
-
end
-
-
1
def splice_in_action!(action_match,insert_point)
-
case insert_point
-
when :end
-
self << action_match.insert_action
-
when :before_action_pos
-
insert(action_match.action_position-1,action_match.insert_action)
-
else raise Error.new("Unexpected insert_point (#{insert_point})")
-
end
-
end
-
-
1
def serialization_form(opts={})
-
items = Array.new
-
component_group_num = 1
-
component_group = nil
-
all_actions = all_actions_with_method?()
-
each do |a|
-
# if cgn = a.component_group_num
-
# TODO: see if can avoid this by avoding actions be reified as component group
-
cgn = a.component_group_num
-
if cgn and !all_actions
-
unless cgn == component_group_num
-
SerializedComponentGroup.add?(items,component_group)
-
component_group = nil
-
component_group_num = cgn
-
end
-
component_group ||= Array.new
-
serialization_form_add_action?(component_group,a,opts)
-
else
-
SerializedComponentGroup.add?(items,component_group)
-
component_group = nil
-
serialization_form_add_action?(items,a,opts)
-
end
-
end
-
SerializedComponentGroup.add?(items,component_group)
-
unless items.empty?
-
# order of clauses important
-
# look for special cases where all actions with methods or single component group
-
if all_actions
-
{Constant::Actions => items}
-
elsif items.size == 1 and items.first.kind_of?(SerializedComponentGroup)
-
{Constant::Components => items.first.components()}
-
else
-
{Constant::OrderedComponents => items}
-
end
-
end
-
end
-
-
# action list can be nil just for parsing
-
1
def self.parse_and_reify(serialized_eb,node_name,action_list,opts={})
-
ret = new()
-
return ret unless action_list
-
lvs = ParsingError::LegalValues.new()
-
ordered_items =
-
if lvs.add_and_match?(serialized_eb){HashWithKey(Constant::OrderedComponents)}
-
serialized_eb[Constant::OrderedComponents]
-
elsif lvs.add_and_match?(serialized_eb){HashWithKey(Constant::Components)}
-
# normalize from component form into ordered_component_form
-
[{Constant::ComponentGroup => serialized_eb[Constant::Components]}]
-
elsif lvs.add_and_match?(serialized_eb){HashWithKey(Constant::Actions)}
-
# normalize from action form into ordered_component_form
-
[{Constant::ComponentGroup => Constant.matches?(serialized_eb,:Actions)}]
-
else
-
raise ParsingError::WrongType.new(serialized_eb,lvs)
-
end
-
-
component_group_num = 1
-
(ordered_items||[]).each do |serialized_item|
-
lvs = ParsingError::LegalValues.new()
-
if lvs.add_and_match?(serialized_item,String)
-
find_and_add_action!(ret,serialized_item,node_name,action_list,opts)
-
elsif lvs.add_and_match?(serialized_item){HashWithSingleKey(Constant::ComponentGroup)}
-
component_group = serialized_item.values.first
-
ParsingError.raise_error_unless(component_group,[String,Array])
-
Array(component_group).each do |serialized_action|
-
ParsingError.raise_error_unless(serialized_action,String)
-
find_and_add_action!(ret,serialized_action,node_name,action_list,opts.merge(:component_group_num => component_group_num))
-
end
-
component_group_num += 1
-
else
-
raise ParsingError::WrongType.new(serialized_item,lvs)
-
end
-
end
-
ret
-
end
-
-
1
def intra_node_stages()
-
ret = Array.new
-
component_group_num = 1
-
component_group = nil
-
components_hash_with(:group_nums=>true).map do |cmp_with_group_num|
-
cmp = cmp_with_group_num[:component]
-
if cgn = cmp_with_group_num[:component_group_num]
-
unless cgn == component_group_num
-
ret << component_group if component_group
-
component_group = nil
-
component_group_num = cgn
-
end
-
component_group ||= Array.new
-
component_group << cmp[:id]
-
else
-
ret << component_group if component_group
-
component_group = nil
-
ret << cmp[:id]
-
end
-
end
-
ret << component_group if component_group
-
ret
-
end
-
-
1
private
-
# has form {Constant::ComponentGroup => [cmp1,cmp2,..]
-
1
class SerializedComponentGroup < Hash
-
1
include Serialization
-
1
def self.add?(ret,component_group)
-
if component_group
-
ret << new().merge(Constant::ComponentGroup => component_group)
-
end
-
end
-
1
def components()
-
values.first
-
end
-
end
-
-
1
def serialization_form_add_action?(ret,action,opts={})
-
if item = action.serialization_form(opts)
-
if method_name = action.method_name?()
-
item << ".#{method_name}"
-
end
-
ret << item
-
end
-
end
-
-
1
def self.find_and_add_action!(ret,serialized_item,node_name,action_list,opts={})
-
if action = Action.find_action_in_list?(serialized_item,node_name,action_list,opts)
-
ret << action
-
end
-
end
-
-
1
def each_action_with_position(&block)
-
each_with_index{|a,i|block.call(a,i+1)}
-
end
-
-
1
class Unordered < self
-
1
def order(intra_node_contraints,strawman_order=nil)
-
# short-cut, no ordering if singleton
-
if size < 2
-
return Ordered.new(self)
-
end
-
ret = Ordered.new()
-
sorted_action_indexes = intra_node_contraints.ret_sorted_action_indexes(self)
-
ndx_action_list = inject(Hash.new){|h,a|h.merge(a.index => a)}
-
sorted_action_indexes.each{|index|ret << ndx_action_list[index]}
-
ret
-
end
-
end
-
-
1
class Ordered < self
-
1
def initialize(array=nil)
-
super()
-
if array
-
array.each{|el|self << el}
-
end
-
end
-
end
-
end
-
-
end; end
-
end; end; end
-
3
module DTK; class Task; class Template
-
2
class Stage; class IntraNode
-
1
class ExecutionBlocks < Array
-
1
include Serialization
-
1
def add_subtask!(parent_task,internode_stage_index,assembly_idh=nil)
-
executable_action = Task::Action::ConfigNode.create_from_execution_blocks(self,assembly_idh)
-
executable_action.set_inter_node_stage!(internode_stage_index)
-
sub_task = Task.create_stub(parent_task.model_handle(),:executable_action => executable_action)
-
parent_task.add_subtask(sub_task)
-
executable_action
-
end
-
-
1
def find_earliest_match?(action_match,action_indexes)
-
each_with_index do |eb,i|
-
if eb.find_earliest_match?(action_match,action_indexes)
-
action_match.execution_block_index = i+1
-
return true
-
end
-
end
-
false
-
end
-
-
1
def has_action_with_method?()
-
!!find{|eb|eb.has_action_with_method?()}
-
end
-
-
1
def delete_action!(action_match)
-
eb_index = action_match.execution_block_index()
-
if :empty == execution_block(eb_index).delete_action!(action_match)
-
delete_execution_block!(eb_index)
-
:empty if empty?()
-
end
-
end
-
-
1
def splice_in_action!(action_match,insert_point)
-
case insert_point
-
when :end_last_execution_block
-
execution_block(:last).splice_in_action!(action_match,:end)
-
when :before_action_pos
-
execution_block(action_match.execution_block_index()).splice_in_action!(action_match,insert_point)
-
else raise Error.new("Unexpected insert_point (#{insert_point})")
-
end
-
end
-
# TODO: have above subsume below
-
1
def splice_in_at_beginning!(execution_blocks)
-
insert(0,*execution_blocks)
-
self
-
end
-
-
1
def serialization_form(opts={})
-
opts_x = {:no_node_name_prefix => true}.merge(opts)
-
execution_blocks = map{|eb|eb.serialization_form(opts_x)}.compact
-
return nil if execution_blocks.empty?()
-
-
ret = OrderedHash.new()
-
if node_name = node_name()
-
node_field_term = ((node() and node().is_node_group?()) ? Constant::NodeGroup : Constant::Node).to_sym
-
ret[node_field_term] = node_name
-
end
-
if execution_blocks.size == 1
-
# if single execution block then we remove this level of nesting
-
ret.merge(execution_blocks.first)
-
else
-
ret.merge(Field::ExecutionBlocks => execution_blocks)
-
end
-
end
-
1
def self.parse_and_reify(serialized_node_actions,node_name,action_list,opts={})
-
# normalize to take into account it may be single execution block
-
normalized_content = serialized_node_actions.kind_of?(Hash) && serialized_node_actions[Field::ExecutionBlocks]
-
normalized_content ||= [serialized_node_actions]
-
ret = new()
-
normalized_content.each{|serialized_eb|ret << ExecutionBlock::Ordered.parse_and_reify(serialized_eb,node_name,action_list,opts)}
-
ret
-
end
-
-
1
def order_each_block(intra_node_constraints)
-
ret = self.class.new()
-
each do |unordered_exec_block|
-
ret << unordered_exec_block.order(intra_node_constraints)
-
end
-
ret
-
end
-
-
1
def intra_node_stages()
-
ret = Array.new
-
return ret if empty?()
-
if find{|eb|!eb.kind_of?(ExecutionBlock::Ordered)}
-
raise Error.new("The method ExecutionBlocks#intra_node_stages can only be called if all its elements are ordered")
-
end
-
map{|eb|eb.intra_node_stages()}
-
end
-
-
1
def node()
-
# all the elements have same node so can just pick first
-
first && first.node()
-
end
-
-
1
def node_name()
-
(node()||{})[:display_name]
-
end
-
-
1
def components()
-
ret = Array.new
-
each{|exec_block|ret += exec_block.components()}
-
ret
-
end
-
-
1
def components_hash_with(opts={})
-
ret = Array.new
-
each{|exec_block|ret += exec_block.components_hash_with(opts)}
-
ret
-
end
-
-
1
private
-
1
def execution_block(execution_block_index)
-
if execution_block_index == :last
-
last()
-
else
-
self[execution_block_index-1]
-
end
-
end
-
-
1
def delete_execution_block!(execution_block_index)
-
delete_at(execution_block_index-1)
-
end
-
end
-
end; end
-
end; end; end
-
3
module DTK; class Task; class Template
-
1
class TaskParams
-
1
include MustacheTemplateMixin
-
-
1
def initialize(task_params)
-
@task_params = task_params
-
end
-
-
1
def self.bind_task_params(hash,task_params)
-
new(task_params).substitute_vars(hash)
-
end
-
-
1
def substitute_vars(object)
-
if object.kind_of?(Array)
-
ret = object.class.new
-
object.each{|el|ret << substitute_vars(el)}
-
ret
-
elsif object.kind_of?(Hash)
-
object.inject(object.class.new){|h,(k,v)|h.merge(k => substitute_vars(v))}
-
elsif object.kind_of?(String)
-
substitute_vars_in_string(object)
-
else
-
object
-
end
-
end
-
1
private
-
-
1
def substitute_vars_in_string(string)
-
unless needs_template_substitution?(string)
-
return string
-
end
-
-
begin
-
bind_template_attributes_utility(string,@task_params)
-
rescue MustacheTemplateError::MissingVar => e
-
ident = 4
-
err_msg = "The variable '#{e.missing_var}' in the following workflow term is not set:\n#{' '*ident}#{string}"
-
raise ErrorUsage.new(err_msg)
-
rescue MustacheTemplateError => e
-
raise ErrorUsage.new("Unbound variable in the workflow: #{e.error_message}")
-
end
-
end
-
end
-
end; end; end
-
2
module DTK; class Task
-
1
class Template
-
1
class TemporalConstraint
-
1
r8_nested_require('temporal_constraint','config_component')
-
1
def initialize(before_action,after_action)
-
@before_action = before_action
-
@after_action = after_action
-
end
-
-
1
attr_reader :before_action,:after_action
-
-
# subclasses override
-
1
def intra_node?()
-
@before_action.node_id == @after_action.node_id
-
end
-
1
def inter_node?()
-
@before_action.node_id != @after_action.node_id
-
end
-
-
1
def before_action_index()
-
@before_action.index
-
end
-
1
def after_action_index()
-
@after_action.index
-
end
-
end
-
end
-
end; end
-
3
module DTK; class Task; class Template
-
1
class TemporalConstraint
-
1
class ConfigComponent < self
-
1
class IntraNode < self
-
end
-
-
1
class PortLinkOrder < self
-
end
-
-
1
class DynamicAttribute < self
-
end
-
-
end
-
end
-
end; end; end
-
2
module DTK; class Task
-
1
class Template
-
1
class TemporalConstraints < Array
-
1
r8_nested_require('temporal_constraints','config_components')
-
-
1
def +(temporal_contraints)
-
ret = self.class.new(self)
-
temporal_contraints.each{|a|ret << a}
-
ret
-
end
-
1
def select(&body)
-
ret = self.class.new()
-
each{|r|ret << r if body.call(r)}
-
ret
-
end
-
-
1
def ret_sorted_action_indexes(action_list)
-
before_index_hash = create_before_index_hash(action_list)
-
before_index_hash.tsort_form.tsort()
-
end
-
# only uses a constraint if both members belong to action_list
-
1
def create_before_index_hash(action_list)
-
action_indexes = action_list.map{|a|a.index}
-
ret = BeforeIndexHash.new(action_indexes)
-
each do |constraint|
-
after_action_index = constraint.after_action_index
-
before_action_index = constraint.before_action_index
-
if action_indexes.include?(after_action_index) and action_indexes.include?(before_action_index)
-
ret.add(after_action_index,before_action_index)
-
end
-
end
-
ret
-
end
-
-
1
private
-
1
def initialize(array=nil)
-
super()
-
array.each{|a|self << a} if array
-
@after_relation = nil
-
end
-
-
1
class BeforeIndexHash < Hash
-
1
def initialize(action_indexes)
-
super()
-
action_indexes.each{|action_index|self[action_index] = Hash.new}
-
end
-
-
1
def add(after_action_index,before_action_index)
-
self[after_action_index][before_action_index] = true
-
end
-
-
1
def tsort_form()
-
inject(TSortHash.new) do |h,(after_index,index_info)|
-
h.merge(after_index => index_info.keys)
-
end
-
end
-
-
1
def ret_and_remove_actions_not_after_any!()
-
ret = Array.new
-
each_key do |action_index|
-
if self[action_index].empty?
-
delete(action_index)
-
ret << action_index
-
end
-
end
-
# for all indexes in ret, remove them in the before hash
-
each_value do |before_hash|
-
before_hash.each_key do |before_action_instance|
-
if ret.include?(before_action_instance)
-
before_hash.delete(before_action_instance)
-
end
-
end
-
end
-
-
ret
-
end
-
end
-
end
-
end
-
end; end
-
-
3
module DTK; class Task; class Template
-
1
class TemporalConstraints
-
1
class ConfigComponents < self
-
1
def self.get(assembly,cmp_action_list)
-
ret = new()
-
return ret if cmp_action_list.empty?
-
-
# indexed by [node_id][:cmp_id]
-
ndx_cmp_list = Indexed.new(cmp_action_list)
-
-
# ordering constraint come from teh following sources
-
# dynamic attributes
-
# port links with temporal order set
-
# intra_node rels - (from the component_order and dependency rels)
-
get_from_port_links(assembly,ndx_cmp_list) +
-
get_from_dynamic_attribute_rel(ndx_cmp_list) +
-
get_intra_node_rels(ndx_cmp_list)
-
end
-
1
private
-
1
def self.get_from_port_links(assembly,ndx_cmp_list)
-
ret = new()
-
ordered_port_links = assembly.get_port_links(:filter => [:neq,:temporal_order,nil])
-
return ret if ordered_port_links.empty?
-
sp_hash = {
-
:cols => [:ports,:temporal_order],
-
:filter => [:oneof, :id, ordered_port_links.map{|r|r.id}]
-
}
-
-
aug_port_links = Model.get_objs(assembly.model_handle(:port_link),sp_hash)
-
aug_port_links.map do |pl|
-
before_port = pl[DirField[pl[:temporal_order].to_sym][:before_field]]
-
after_port = pl[DirField[pl[:temporal_order].to_sym][:after_field]]
-
before_cmp_list_el = ndx_cmp_list.el(before_port[:node_node_id],before_port[:component_id])
-
after_cmp_list_el = ndx_cmp_list.el(after_port[:node_node_id],after_port[:component_id])
-
if constraint = create_temporal_constraint?(:port_link_order,before_cmp_list_el,after_cmp_list_el)
-
ret << constraint
-
end
-
end
-
ret
-
end
-
1
DirField = {
-
:before => {:before_field => :input_port, :after_field => :output_port},
-
:after => {:before_field => :output_port, :after_field => :input_port}
-
}
-
-
1
def self.get_from_dynamic_attribute_rel(ndx_cmp_list)
-
ret = new()
-
attr_mh = ndx_cmp_list.model_handle(:attribute)
-
filter = [:oneof,:component_component_id,ndx_cmp_list.component_ids]
-
# shortcut if no dynamic attributes
-
sp_hash = {
-
:cols => [:id],
-
:filter => [:and, [:eq,:dynamic,true], filter]
-
}
-
return ret if Model.get_objs(attr_mh,sp_hash).empty?
-
-
# get augmented attr list, needed for dependency analysis
-
aug_attr_list = Attribute.get_augmented(attr_mh,filter)
-
Attribute.guarded_attribute_rels(aug_attr_list) do |guard_rel|
-
guard_attr = guard_rel[:guard_attr]
-
guarded_attr = guard_rel[:guarded_attr]
-
before_cmp_list_el = ndx_cmp_list.el(guard_attr[:node][:id],guard_attr[:component][:id])
-
after_cmp_list_el = ndx_cmp_list.el(guarded_attr[:node][:id],guarded_attr[:component][:id])
-
if constraint = create_temporal_constraint?(:dynamic_attribute,before_cmp_list_el,after_cmp_list_el)
-
ret << constraint
-
end
-
end
-
ret
-
end
-
-
1
def self.get_intra_node_rels(ndx_cmp_list)
-
ret = new()
-
# TODO: more efficient way to do this; right now just leevraging existing methods; also these methods draw these relationships from
-
# component templates, not component instances
-
cmp_deps = Component::Instance.get_ndx_intra_node_rels(ndx_cmp_list.component_idhs())
-
cmp_deps.reject!{|cmp_id,info|info[:component_dependencies].empty?}
-
return ret if cmp_deps.empty?
-
-
# component dependencies just have component type;
-
# TODO: may extend so that it can match on title
-
cmp_deps.each do |cmp_id,dep_info|
-
ndx_cmp_list.els(cmp_id) do |node_id,after_cmp_list_el|
-
dep_info[:component_dependencies].each do |before_cmp_type|
-
ndx_cmp_list.index_by_node_id_cmp_type(node_id,before_cmp_type).each do |before_cmp_list_el|
-
if constraint = create_temporal_constraint?(:intra_node,before_cmp_list_el,after_cmp_list_el)
-
ret << constraint
-
end
-
end
-
end
-
end
-
end
-
ret
-
end
-
-
1
def self.create_temporal_constraint?(type,before_cmp_list_el,after_cmp_list_el)
-
if before_cmp_list_el and after_cmp_list_el
-
klass =
-
case type
-
when :intra_node then TCBase()::IntraNode
-
when :port_link_order then TCBase()::PortLinkOrder
-
when :dynamic_attribute then TCBase()::DynamicAttribute
-
end
-
klass.new(before_cmp_list_el,after_cmp_list_el) if klass
-
end
-
end
-
-
1
def self.TCBase()
-
TemporalConstraint::ConfigComponent
-
end
-
-
1
class Indexed < SimpleHashObject
-
1
def initialize(component_list)
-
super()
-
@component_id_info = Hash.new
-
@ndx_by_node_id_cmp_type = Hash.new
-
@cmp_model_handle = component_list.first && component_list.first.model_handle()
-
-
component_list.each do |cmp|
-
cmp_id = cmp[:id]
-
node_id = cmp[:node][:id]
-
(self[node_id] ||= Hash.new)[cmp_id] = cmp
-
@component_id_info[cmp_id] ||= cmp.id_handle()
-
pntr = @ndx_by_node_id_cmp_type[node_id] ||= Hash.new
-
(pntr[cmp[:component_type]] ||= Array.new) << cmp
-
end
-
end
-
-
1
def component_ids()
-
@component_id_info.keys
-
end
-
1
def component_idhs()
-
@component_id_info.values
-
end
-
-
1
def el(node_id,cmp_id)
-
(self[node_id]||{})[cmp_id]
-
end
-
-
# block has params node_id, cmp_list_el
-
1
def els(cmp_id,&block)
-
each_pair do |node_id,ndx_by_cmp|
-
if cmp_list_el = ndx_by_cmp[cmp_id]
-
block.call(node_id,cmp_list_el)
-
end
-
end
-
end
-
1
def index_by_node_id_cmp_type(node_id,cmp_type)
-
(@ndx_by_node_id_cmp_type[node_id]||{})[cmp_type]||[]
-
end
-
-
1
def model_handle(model_name)
-
@cmp_model_handle && @cmp_model_handle.createMH(model_name)
-
end
-
-
end
-
-
end
-
end
-
end; end; end
-
-
1
module XYZ
-
1
class TaskError < Model
-
end
-
end
-
1
module XYZ
-
1
class TaskEvent < Model
-
1
def self.create_event?(event_type,task,result)
-
action = task[:executable_action]
-
return nil unless action
-
if action.kind_of?(Task::Action::PowerOnNode)
-
# TODO: Look into this see if neccessery
-
Log.warn "TODO: >>>>>>>> CREATING POWER ON NODE EVEN <<<<<<<<< IMPLEMENTATION NEEDED"
-
nil
-
elsif action.kind_of?(Task::Action::CreateNode)
-
case event_type
-
when :start
-
StartCreateNode.create_start?(action)
-
when :complete_succeeded,:complete_failed,:complete_timeout
-
CompleteCreateNode.create_complete?(action,event_type,result)
-
end
-
elsif action.kind_of?(Task::Action::ConfigNode)
-
case event_type
-
when :start
-
StartConfigNode.create_start?(action)
-
when :complete_succeeded,:complete_failed,:complete_timeout
-
CompleteConfigNode.create_complete?(action,event_type,result)
-
end
-
end
-
end
-
-
1
class Event < HashObject
-
1
def self.create_start?(action)
-
is_no_op?(action) ? nil : new(action)
-
end
-
1
def self.create_complete?(action,status,result)
-
is_no_op?(action) ? nil : new(action,status,result)
-
end
-
1
private
-
# gets overritten if needed
-
1
def self.is_no_op?(action)
-
nil
-
end
-
-
1
def attr_val_pairs(attributes)
-
(attributes||[]).inject({}) do |h,a|
-
name = a[:display_name].to_sym
-
AttrIgnoreList.include?(name) ? h : h.merge(name => a[:attribute_value])
-
end
-
end
-
1
AttrIgnoreList = [:sap__l4]
-
end
-
-
1
class StartCreateNode < Event
-
# TODO: should encapsulate this at workflow or iaas level
-
1
def self.is_no_op?(action)
-
ext_ref = action[:node][:external_ref]
-
ext_ref[:type] == "ec2_instance" and ext_ref[:instance_id]
-
end
-
-
1
def initialize(action)
-
node = action[:node]
-
ext_ref = node[:external_ref]
-
ext_ref_type = ext_ref[:type]
-
hash = {
-
:event => "initiating_create_node",
-
:node_name => node[:display_name],
-
:node_type => ext_ref_type.to_s,
-
}
-
# TODO: should encapsulate this in call to iaas sdapter
-
case ext_ref_type
-
when "ec2_instance", "ec2_image" #TODO: may chaneg code so dont get ec2_image
-
hash.merge!(:image_id => ext_ref[:image_id])
-
else
-
Log.error("external ref type #{ext_ref_type} not treated")
-
end
-
hash.merge(attr_val_pairs(action[:attributes]))
-
super(hash)
-
end
-
end
-
-
1
class CompleteCreateNode < Event
-
1
def initialize(action,status,result)
-
node = action[:node]
-
ext_ref = node[:external_ref]
-
ext_ref_type = ext_ref[:type]
-
hash = {
-
:event => "completed_create_node",
-
:node_name => node[:display_name],
-
:node_type => ext_ref_type.to_s,
-
:status => status
-
}
-
hash.merge(attr_val_pairs(action[:attributes]))
-
if status == :complete_failed
-
if error_msg = error_msg(result)
-
hash.merge!(:error_msg => error_msg)
-
end
-
end
-
super(hash)
-
end
-
1
private
-
1
def error_msg(result)
-
# TODO: stub
-
if error_obj = result[:error_object]
-
error_obj.to_s
-
end
-
end
-
end
-
-
1
class StartConfigNode < Event
-
1
def initialize(action)
-
cmp_info = action.component_actions().map do |cmp_attrs|
-
attr_info = attr_val_pairs(cmp_attrs[:attributes].reject{|a|a[:dynamic]})
-
{:component_name => cmp_attrs[:component][:display_name]}.merge(attr_info)
-
end
-
hash = {
-
:event => "initiating_config_node",
-
:node_name => action[:node][:display_name],
-
:components => cmp_info
-
}
-
super(hash)
-
end
-
end
-
-
1
class CompleteConfigNode < Event
-
1
def initialize(action,status,result)
-
hash = {
-
:event => status.to_s,
-
:node_name => action[:node][:display_name],
-
:components => action.component_actions().map{|cmp_attrs| cmp_attrs[:component][:display_name]}
-
}
-
if errors = (result[:data]||{})[:errors]
-
hash.merge!(:errors => errors)
-
end
-
dyn_attrs = dynamic_attributes(status,result)
-
hash.merge!(:dynamic_attributes => dyn_attrs) unless dyn_attrs.empty?
-
super(hash)
-
end
-
1
private
-
1
def dynamic_attributes(status,result)
-
ret = Hash.new
-
return ret unless status == :complete_succeeeded
-
return ret unless dyn_attrs = (result[:data]||{})[:dynamic_attributes]
-
dyn_attrs.each do |da|
-
cmp = ret[da[:component_name]] ||= Hash.new
-
cmp[da[:attribute_name]] = da[:attribute_val]
-
end
-
ret
-
end
-
end
-
end
-
end
-
1
module XYZ
-
1
class TaskLog < Model
-
1
def self.get_and_update_logs_content(task,assoc_nodes,log_filter)
-
ret_info = assoc_nodes.inject({}){|h,n|h.merge(n[:task_id] => {:node => n})}
-
sp_hash = {
-
:cols => [:id,:status,:type,:content, :task_id],
-
:filter => [:oneof, :task_id, assoc_nodes.map{|n|n[:task_id]}]
-
}
-
task_log_mh = task.model_handle.createMH(:task_log)
-
task_logs = Model.get_objects_from_sp_hash(task_log_mh,sp_hash)
-
-
# associate logs with task_ids and kick of deferred job to get logs for all nodes that dont haev compleet logs in db
-
task_logs.each do |task_log|
-
task_id = task_log[:task_id]
-
# implicit assumption that only one log per task
-
ret_info[task_id].merge!(task_log.slice({:content => :log},:status,:type))
-
end
-
incl_assoc_nodes = ret_info.values.reject{|t|t[:status] == "complete"}.map{|info|info[:node]}
-
unless incl_assoc_nodes.empty?
-
# initiate defer task to get logs
-
task_pbuilderid_index = incl_assoc_nodes.inject({}){|h,n|h.merge(Node.pbuilderid(n) => n[:task_id])}
-
config_agent_types = assoc_nodes.inject({}){|h,n|h.merge(n[:task_id] => n[:config_agent_type])}
-
callbacks = {
-
:on_msg_received => proc do |msg|
-
response = CommandAndControl.parse_response__get_logs(task,msg)
-
if response[:status] == :ok
-
task_id = task_pbuilderid_index[response[:pbuilderid]]
-
task_idh = task.model_handle.createIDH(:id => task_id)
-
config_agent_type = config_agent_types[task_id]
-
TaskLog.create_or_update(task_idh,config_agent_type.to_s,response[:log_content])
-
else
-
Log.error("error response for request to get log")
-
# TODO: put some subset of this in error msg
-
pp msg
-
end
-
end
-
}
-
CommandAndControl.request__get_logs(task,incl_assoc_nodes,callbacks,:log_type => :config_agent)
-
end
-
ret_info.values.inject({}){|h,log_info|h.merge(log_info[:node][:id] => log_info)}
-
end
-
-
1
def self.create_or_update(task_idh,log_type,log_content)
-
task_id = task_idh.get_id()
-
status = ParseLog.log_complete?(log_type,log_content) ? "complete" : "in_progress"
-
-
# create if needed
-
sp_hash = {
-
:cols => [:id],
-
:filter => [:and, [:eq, :task_id, task_id],
-
[:eq, :type, log_type.to_s]]
-
}
-
task_log_mh = task_idh.createMH(:task_log)
-
existing = Model.get_objects_from_sp_hash(task_log_mh,sp_hash).first
-
row = {
-
:task_id => task_id,
-
:status => status,
-
:type => log_type.to_s,
-
:content => log_content
-
}
-
if existing
-
id = existing[:id]
-
ret = task_log_mh.createIDH(:id => id)
-
row.merge!(:id => id)
-
Model.update_from_rows(task_log_mh,[row])
-
else
-
row.merge!(:ref => log_type.to_s)
-
ret = Model.create_from_row(task_log_mh,row,:convert => true)
-
end
-
ret
-
end
-
end
-
end
-
1
module XYZ
-
# TODO: did not put in XYZ module because Ramaze user helper access ::User
-
# include XYZ
-
1
class User < Model
-
-
1
def self.common_columns()
-
[
-
10
:c,
-
:id,
-
:username,
-
:password,
-
:user_groups,
-
:default_namespace,
-
:catalog_username,
-
:catalog_password
-
]
-
end
-
-
1
def self.create_user_in_groups?(user_mh,username,opts={})
-
groupnames = [UserGroup.all_groupname(),UserGroup.private_groupname(username)]
-
user_hash = Aux.HashHelper(
-
:password => DataEncryption.hash_it(opts[:password] || random_generate_password()),
-
:default_namespace => opts[:namespace]||username,
-
:catalog_password => opts[:catalog_password],
-
:catalog_username => opts[:catalog_username]
-
)
-
-
user_id = create_from_row?(user_mh,username,{:username => username},user_hash).get_id()
-
groupnames.each do |groupname|
-
group_id = create_from_row?(user_mh.createMH(:user_group), groupname, { :groupname => groupname }).get_id
-
create_from_row?(user_mh.createMH(:user_group_relation), "#{username}-#{groupname}", { :user_id => user_id, :user_group_id => group_id })
-
end
-
get_user(user_mh,username)
-
end
-
-
1
def self.authenticate(hash)
-
10
username, password = hash[:username], hash[:password]
-
10
model_handle = ModelHandle.new(hash[:c],:user)
-
10
if user = get_user(model_handle,username)
-
10
user if user.authenticated?(password)
-
end
-
end
-
-
1
def get_namespace()
-
# TODO: [Haris] Add namespace to database got to double check with Rich
-
self[:username]
-
end
-
-
1
def username
-
self[:username] || self.update_object!(:username)[:username]
-
end
-
-
1
def catalog_username
-
15
self[:catalog_username] || self.update_obj!(:catalog_username)[:catalog_username]
-
end
-
-
1
def catalog_password
-
15
self[:catalog_password] || self.update_obj!(:catalog_password)[:catalog_password]
-
end
-
-
# TODO: temp
-
1
def authenticated?(hashed_password)
-
10
self[:password] && (self[:password] == hashed_password)
-
end
-
-
1
def self.get_user(model_handle,username)
-
10
sp_hash = {
-
:relation => :user,
-
:filter => [:and,[:eq, :username, username]],
-
:columns => common_columns()
-
}
-
-
10
get_full_user(model_handle, sp_hash)
-
end
-
-
1
def self.get_user_by_id(model_handle, user_id)
-
sp_hash = {
-
:relation => :user,
-
:filter => [:and,[:eq, :id, user_id.to_i]],
-
:columns => common_columns()
-
}
-
-
get_full_user(model_handle, sp_hash)
-
end
-
-
1
def get_setting(key)
-
(self[:settings]||{})[key]
-
end
-
-
1
def get_private_group()
-
# makes assumption taht private group is one where username and groupname are the same
-
# TODO: more efficient way of getting this
-
group_rows = get_objs(:cols => [:username,:user_groups])
-
# TODO: probably better to put in attribute for group which means user private group rather than having naming convention assumption here
-
selected_row = group_rows.find{|r|r[:user_group][:groupname] == "user-#{r[:username]}"}
-
selected_row && selected_row[:user_group]
-
end
-
-
1
def update_password(password)
-
update_hash = {:id => id(), :password => DataEncryption.hash_it(password) }
-
Model.update_from_rows(model_handle,[update_hash])
-
end
-
-
1
def update_settings(key_value_hash)
-
# convert to sym form TODO: change code so this is not necessary
-
kv_hash_sym_form = key_value_hash.inject({}){|h,kv|h.merge(kv[0].to_sym => kv[1])}
-
update_hash = {:id => id(),:settings => kv_hash_sym_form}
-
Model.update_from_rows(model_handle,[update_hash],:partial_value=>true)
-
end
-
-
-
1
def to_json_hash
-
json_string = JSON_FIELDS.inject({}) { |res, field| res.merge!({ field => self[field] }) }
-
return json_string
-
end
-
-
1
private
-
-
1
def self.random_generate_password()
-
Aux.random_generate(:length => 8, :type => /[a-z]/)
-
end
-
-
1
def self.get_full_user(model_handle, sp_hash)
-
10
rows = Model.get_objs(model_handle,sp_hash)
-
10
return nil if rows.empty?
-
# all rows will be same except for on :user_group and :user_group_relation cols
-
30
group_ids = rows.map{|r|(r[:user_group]||{})[:id]}.compact
-
100
rows.first.reject{|k,v|[:user_group,:user_group_relation].include?(k)}.merge(:group_ids => group_ids)
-
end
-
-
1
JSON_FIELDS = [:c, :id, :username, :password, :group_ids, :default_namespace]
-
-
-
1
def self.from_json(json_string)
-
user_object = self.new({},json_string[:c])
-
json_string.each do |k,v|
-
user_object[k.to_sym] = v
-
end
-
-
return user_object
-
end
-
-
1
def self.create_user_session_hash(user_object)
-
return {} unless user_object
-
return {
-
"credentials" => {
-
"username" => user_object[:username],
-
"password" => user_object[:password],
-
"c" => user_object[:c],
-
"default_namespace" => user_object[:default_namespace],
-
"access_time" => Time.now.to_s
-
}
-
}
-
end
-
-
end
-
end
-
1
module XYZ
-
1
class UserGroup < Model
-
1
def self.all_groupname()
-
"all"
-
end
-
-
1
def self.private_groupname(username)
-
"user-#{username}"
-
end
-
-
1
def self.get_all_group(model_handle)
-
get_by_groupname(model_handle,all_groupname())
-
end
-
-
1
def self.get_private_group(model_handle,username)
-
get_by_groupname(model_handle,private_groupname(username))
-
end
-
-
1
def self.get_by_groupname(model_handle,groupname)
-
sp_hash = {
-
:cols => [:id, :groupname],
-
:filter => [:eq, :groupname,groupname]
-
}
-
get_obj(model_handle,sp_hash)
-
end
-
end
-
end
-
1
module XYZ
-
1
class UserGroupRelation < Model
-
end
-
end
-
# TODO: remove or cleanup; determine if we need to persist these
-
1
module DTK
-
1
class Violation < Model
-
1
def self.find_missing_required_attributes(level,commit_task)
-
component_actions = commit_task.component_actions
-
errors = Array.new
-
component_actions.each do |action|
-
AttributeComplexType.flatten_attribute_list(action[:attributes],:flatten_nil_value=>true).each do |attr|
-
# TODO: need to distingusih between legitimate nil value and unset
-
if attr[:required] and attr[:attribute_value].nil? and (not attr[:port_type] == "input") and (not attr[:dynamic])
-
aug_attr = attr.merge(:nested_component => action[:component], :node => action[:node])
-
errors << MissingRequiredAttribute.new(aug_attr)
-
end
-
end
-
end
-
errors.empty? ? nil : ErrorViolations.new(errors)
-
end
-
-
1
def self.save(parent,violation_expression,opts={})
-
expression_list = ret_expression_list(violation_expression)
-
save_list(parent,expression_list,opts)
-
end
-
-
1
def self.ret_violations(target_node_id_handles)
-
ret = Array.new
-
return ret if target_node_id_handles.empty?
-
Node.get_violations(target_node_id_handles)
-
end
-
1
def self.update_violations(target_node_id_handles)
-
ret = Array.new
-
saved_violations = ret_violations(target_node_id_handles)
-
return ret if saved_violations.empty?
-
sample_idh = target_node_id_handles.first
-
-
viol_idhs_to_delete = Array.new
-
saved_violations.each do |v|
-
raise Error.new("Not treating expression form") unless constraint_hash = v[:expression][:constraint]
-
constraint = Constraint.create(constraint_hash)
-
vtttype = constraint[:target_type]
-
target_idh = sample_idh.createIDH(:model_name => vt_model_name(vtttype),:id => constraint[:target_id])
-
target = {vtttype => target_idh}
-
if constraint.evaluate_given_target(target)
-
Log.info("violation with id #{v[:id].to_s} no longer applicable; being removed")
-
viol_idhs_to_delete << sample_idh.createIDH(:model_name => :violation,:id => v[:id])
-
else
-
ret << v
-
end
-
end
-
delete_instances(viol_idhs_to_delete) unless viol_idhs_to_delete.empty?
-
ret
-
end
-
-
1
private
-
1
def self.vt_model_name(vtttype)
-
ret = VTModelName[vtttype]
-
return ret if ret
-
raise Error.new("Unexpected violaition target type #{vtttype}")
-
end
-
1
VTModelName = {
-
"target_node_id_handle" => :node
-
}
-
1
def self.ret_expression_list(expression)
-
return Array.new if expression[:elements].empty?
-
return expression unless expression[:logical_op] == :and
-
expression[:elements].map do |expr_el|
-
if expr_el.kind_of?(Constraint) then expr_el.merge(:violation_target => expression[:violation_target])
-
elsif (not expr_el.logical_op == :and) then expr_el
-
else expr_el.map{|x|ret_expression_list(x)}
-
end
-
end.flatten
-
end
-
-
1
# Persists a list of violation expression elements as violation rows under
# parent. Builds one row per element, prunes rows that duplicate violations
# already saved for the same nodes, then bulk-inserts the remainder.
# NOTE(review): opts is currently unused — confirm before removing it.
def self.save_list(parent,expression_list,opts={})
  # each element of expression_list will either be constraint or a disjunction
  parent_idh = parent.id_handle_with_auth_info()
  parent_mn = parent_idh[:model_name]
  violation_mh = parent_idh.create_childMH(:violation)
  parent_id = parent_idh.get_id()
  # column on the violation table that points back at this parent model
  parent_col = DB.parent_field(parent_mn,:violation)

  create_rows = Array.new
  target_node_id_handles = Array.new
  expression_list.each do |e|
    # a disjunction shares severity across its constraints; sample the first
    sample_constraint = e.kind_of?(Constraint) ? e : e.constraint_list.first
    vt = e[:violation_target]
    # only node-targeted violations are supported here
    raise Error.new("target type #{vt[:type]} not treated") unless vt[:type] == "target_node_id_handle"
    # a disjunction's description joins its elements' descriptions with " or "
    description = e.kind_of?(Constraint) ? e[:description] : e[:elements].map{|x|x[:description]}.join(" or ")
    ref = "violation" #TODO: stub
    new_item = {
      :ref => ref,
      parent_col => parent_id,
      :severity => sample_constraint[:severity],
      :target_node_id => vt[:id],
      :expression => violation_expression_for_db(e),
      :description => description
    }
    create_rows << new_item
    target_node_id_handles << vt[:id_handle]
  end
  # avoid re-inserting violations already recorded against these nodes
  saved_violations = ret_violations(target_node_id_handles)
  create_rows = prune_duplicate_violations(create_rows,saved_violations)
  create_from_rows(violation_mh,create_rows, :convert => true) unless create_rows.empty?
end
-
-
1
# Serializes a Constraint into the hash form stored in the violation row's
# :expression column. Only bare Constraints are supported.
def self.violation_expression_for_db(expr)
  unless expr.kind_of?(Constraint)
    raise Error.new("Violation expression form not treated")
  end
  violation_target = expr[:violation_target]
  constraint = {
    :type => expr[:type],
    :component_component_id => expr[:component_component_id],
    :attribute_attribute_id => expr[:attribute_attribute_id],
    :negate => expr[:negate],
    :search_pattern => SearchPattern.process_symbols(expr[:search_pattern]),
    :target_type => violation_target[:type],
    :target_id => violation_target[:id],
    :id => expr[:id]
  }
  {:constraint => constraint}
end
-
-
1
# Drops rows from create_rows that duplicate an already-saved violation.
# A row duplicates a saved violation when severity, constraint id, and
# constraint target id all match. Rows or saved violations whose expression
# is not in constraint form are logged and left alone (the `next` yields nil,
# which keeps the row / skips the saved entry).
def self.prune_duplicate_violations(create_rows,saved_violations)
  return create_rows if saved_violations.empty?
  create_rows.reject do |r|
    unless c1 = r[:expression][:constraint]
      Log.error("Not treating expressions of form #{r[:expression].keys.first}")
      next
    end
    saved_violations.find do |sv|
      unless c2 = sv[:expression][:constraint]
        Log.error("Not treating expressions of form #{sv[:expression].keys.first}")
        next
      end
      # fix: the target_id comparison previously used assignment (=) instead of
      # equality (==), which both mutated the row's constraint and made every
      # severity/id-matching row count as a duplicate regardless of target
      (r[:severity] == sv[:severity]) and (c1[:id] == c2[:id]) and (c1[:target_id] == c2[:target_id])
    end
  end
end
-
-
1
public

# Marker base class for user-facing violation errors; concrete subclasses
# (e.g. MissingRequiredAttribute) supply the message.
class ErrorViolation < ErrorUsage
end
-
1
# Raised when a required attribute has no value set.
class MissingRequiredAttribute < ErrorViolation
  def initialize(aug_attr)
    @aug_attr = aug_attr
    super(error_msg(aug_attr))
  end

  private

  # Builds the user-facing message from the attribute's display name.
  def error_msg(aug_attr)
    display_name = aug_attr.print_form()[:display_name]
    "The attribute (#{display_name}) is required, but missing"
  end
end
-
-
1
# Logical expression tree (conjunction/disjunction) over Constraint objects
# that all share a single violation Target.
class Expression < HashObject
  # violation_target - single-entry hash mapping a target type key to an id handle
  # logical_op       - connective (:and / :or) applied to :elements
  def initialize(violation_target,logical_op)
    hash = {
      :violation_target => Target.new(violation_target),
      :logical_op => logical_op,
      :elements => Array.new
    }
    super(hash)
  end

  # Appends a sub-expression (Constraint or nested Expression); returns self
  # so appends can be chained.
  def <<(expr)
    self[:elements] << expr
    self
  end

  # Flat list of every Constraint leaf in this expression tree.
  def constraint_list()
    self[:elements].map do |e|
      e.kind_of?(Constraint) ? e : e.constraint_list()
    end.flatten
  end

  # Conjoins expressions; they must all share the same violation target.
  # NOTE(review): uses method-style access (exprs.first.violation_target) while
  # the rest of the class uses [:violation_target]; presumably HashObject
  # supports both — confirm.
  def self.and(*exprs)
    vt = exprs.first.violation_target
    # fix: was exprs[1..exprs.size-1].map — each is the right tool for a
    # side-effect-only validation pass
    exprs[1..-1].each do |e|
      unless vt == e[:violation_target]
        raise Error.new("Not supported conjunction of expressions with different violation_targets")
      end
    end
    ret = new(vt,:and)
    exprs.each{|e|ret << e}
    ret
  end

  def empty?()
    self[:elements].empty?()
  end

  # Pretty-print form: a constraint description, or [op, arg, ...] nesting.
  def pp_form()
    # fix: `Array.new if ...` was a dead expression (result discarded), so an
    # empty expression fell through and returned [logical_op]; return [] instead
    return Array.new if self[:elements].empty?
    args = self[:elements].map{|x|x.kind_of?(Constraint) ? x[:description] : x.pp_form}
    args.size == 1 ? args.first : [self[:logical_op]] + args
  end
end
-
-
1
# Value object wrapping a {type_key => id_handle} pair as a violation target.
class Target < HashObject
  def initialize(key_idh)
    target_type = key_idh.keys.first
    idh = key_idh.values.first
    hash = {
      :type => target_type,
      :id_handle => idh,
      :id => idh.get_id()
    }
    super(hash)
  end

  # Targets are equal when type and id match (id handles are not compared).
  def ==(vt2)
    self[:type] == vt2[:type] && self[:id] == vt2[:id]
  end
end
-
end
-
end
-
-
1
module DTK
  # The singleton 'workspace' assembly: a scratch assembly instance (plus its
  # backing template, service module, and module branch) identified by the
  # reserved ref in AssemblyFields.
  class Workspace < Assembly::Instance
    def self.create_from_id_handle(idh)
      idh.create_object(:model_name => :assembly_workspace)
    end

    # creates both a service, module branch, assembly instance and assembly template for the workspace
    def self.create?(target_idh,project_idh)
      Factory.create?(target_idh,project_idh)
    end

    def self.is_workspace?(obj)
      obj.kind_of?(self) or (AssemblyFields[:ref] == obj.get_field?(:ref))
    end

    # if obj is a workspace it converts it to a workspace object; returns nil otherwise
    def self.workspace?(obj)
      if is_workspace?(obj)
        create_from_id_handle(obj.id_handle).merge(obj)
      end
    end

    # Fetches the single workspace row; logs and returns nil unless exactly one
    # row matches the reserved workspace ref.
    def self.get_workspace(workspace_mh,opts={})
      opts_get = Aux.hash_subset(opts,:cols).merge(:filter => [:eq,:ref,AssemblyFields[:ref]])
      rows = Workspace.get(workspace_mh,opts_get)
      unless rows.size == 1
        Log.error_pp(["Unexpected that get_workspace does not return 1 row",rows])
        return nil
      end
      rows.first
    end

    # Empties the workspace: deletes its contents, assembly-level attributes,
    # and tasks. Note: mutates opts by forcing :do_not_raise.
    def purge(opts={})
      opts.merge!(:do_not_raise => true)
      self.class.delete_contents([id_handle()],opts)
      delete_assembly_level_attributes()
      delete_tasks()
    end

    # opts has :mode
    # three modes
    #  :direct - direct command called (default)
    #  :from_set_default_target
    #  :from_delete_target
    def self.set_target(target,opts={})
      if workspace = get_workspace(target.model_handle(:assembly_workspace))
        workspace.set_target(target,opts)
      end
    end

    def set_target(target,opts={})
      return unless target
      mode = opts[:mode] || :direct
      current_target = get_target()
      if current_target && current_target.id == target.id
        # already pointing at this target; only the direct command warns
        if mode == :direct
          raise ErrorUsage::Warning.new("Target is already set to #{target.get_field?(:display_name)}")
        end
        return
      end

      update = true
      unless op_status_all_pending?()
        case mode
        when :direct
          raise ErrorUsage.new("The command 'set-target' can only be invoked before the workspace has been converged (i.e., is in 'pending' state)")
        when :from_set_default_target
          # treated as no op (keep workspace as is)
          update = false
        when :from_delete_target
          # want to update so deleting target does not have foreign key that causes the workspace object to be deleted
          update = true
        else
          raise Error.new("Unexpected mode '#{mode}'")
        end
      end
      if update
        update(:datacenter_datacenter_id => target.id)
      end
    end

    # True when service_module is the hidden workspace service module.
    # fix: this method was defined twice verbatim in the original; the
    # redundant second definition has been removed.
    def self.is_workspace_service_module?(service_module)
      service_module.get_field?(:display_name) == ServiceModuleFields[:display_name]
    end

    private

    def delete_tasks()
      clear_tasks(:include_executing_task => true)
    end

    def delete_assembly_level_attributes()
      assembly_attrs = get_assembly_level_attributes()
      return if assembly_attrs.empty?()
      Model.delete_instances(assembly_attrs.map{|r|r.id_handle()})
    end

    # Reserved field values identifying workspace assembly rows.
    AssemblyFields = {
      :ref => '__workspace',
      :component_type => 'workspace',
      :version => 'master',
      :description => 'Private workspace'
    }
    # Reserved name of the hidden workspace service module.
    ServiceModuleFields = {
      :display_name => '.workspace'
    }

    # Builds the workspace's backing objects: service module + branch, the
    # assembly template, and the assembly instance pointing at the target.
    class Factory < self
      def self.create?(target_idh,project_idh)
        factory = new(target_idh,project_idh)
        workspace_template_idh = factory.create_assembly?(:template,:project_project_id => project_idh.get_id())
        instance_assigns = {
          :datacenter_datacenter_id => target_idh.get_id(),
          :ancestor_id => workspace_template_idh.get_id()
        }
        factory.create_assembly?(:instance,instance_assigns)
      end

      # Creates (or finds) the workspace assembly row of the given type
      # (:template or :instance); assigns supplies the parent foreign keys.
      def create_assembly?(type,assigns)
        ref = AssemblyFields[:ref]
        match_assigns = {:ref => ref}.merge(assigns)
        other_assigns = {
          :display_name => AssemblyFields[:component_type],
          :component_type => AssemblyFields[:component_type],
          :version => AssemblyFields[:version],
          :description => AssemblyFields[:description],
          :module_branch_id => @module_branch_idh.get_id(),
          :type => (type == :template) ? 'template' : 'composite'
        }
        cmp_mh_with_parent = @component_mh.merge(:parent_model_name => (type == :template ? :project : :datacenter))
        Model.create_from_row?(cmp_mh_with_parent,ref,match_assigns,other_assigns)
      end

      private

      def initialize(target_idh,project_idh)
        @component_mh = target_idh.createMH(:component)
        # fix: create_service_and_module_branch? was called twice, with the
        # first (side-effecting) result discarded; call it once
        @module_branch_idh = create_service_and_module_branch?(project_idh)
      end

      # Returns the id handle of the workspace service module's branch,
      # creating the service module and branch if they do not exist yet.
      def create_service_and_module_branch?(project_idh)
        project = project_idh.create_object()
        service_module_name = ServiceModuleFields[:display_name]
        version = nil
        # TODO: Here namespace object is set to nil maybe this needs to be changed
        if service_module_branch = ServiceModule.get_workspace_module_branch(project,service_module_name,version,nil,:no_error_if_does_not_exist=>true)
          service_module_branch.id_handle()
        else
          local_params = ModuleBranch::Location::LocalParams::Server.new(
            :module_type => :service_module,
            :module_name => service_module_name,
            :namespace => Namespace.default_namespace(project.model_handle(:namespace)),
            :version => version
          )

          # TODO: look to remove :config_agent_type
          module_and_branch_info = ServiceModule.create_module(project,local_params,:config_agent_type => ConfigAgent::Type.default_symbol)
          service_module = module_and_branch_info[:module_idh].create_object()
          service_module.update(:dsl_parsed => true)

          branch_idh = module_and_branch_info[:module_branch_idh]
          branch = branch_idh.create_object()
          branch.set_dsl_parsed!(true)

          branch_idh
        end
      end
    end
  end
end
-
1
# Requires the given files relative to the calling file's directory.
# Accepts either a list of paths or a single array of paths.
def r8_require(*files_x)
  requested = files_x.first
  file_list = requested.kind_of?(Array) ? requested : files_x
  # directory of the file that invoked us (strip "/<basename>:<line>..." tail)
  base_dir = caller.first.gsub(/\/[^\/]+$/,"")
  file_list.each do |rel_path|
    require File.expand_path(rel_path, base_dir)
  end
end
-
1
# Requires files from subdirectory dir, relative to the calling file's
# directory. Accepts either a list of names or a single array of names.
def r8_nested_require(dir,*files_x)
  requested = files_x.first
  file_list = requested.kind_of?(Array) ? requested : files_x
  base_dir = caller.first.gsub(/\/[^\/]+$/,"")
  file_list.each do |name|
    require File.expand_path("#{dir}/#{name}", base_dir)
  end
end
-
-
1
# Like r8_nested_require, but the base directory is passed explicitly
# instead of being derived from the call stack.
def r8_nested_require_with_caller_dir(caller_dir,dir,*files_x)
  requested = files_x.first
  file_list = requested.kind_of?(Array) ? requested : files_x
  file_list.each do |name|
    require File.expand_path("#{dir}/#{name}", caller_dir)
  end
end
-
1
# Loads each named library from the dtk-common checkout (or gem).
def r8_require_common_lib(*files_x)
  files_x.each do |common_file|
    dtk_require_dtk_common_file(common_file)
  end
end
-
##### TODO: deprecate forp above
-
1
# Requires the given files relative to the calling file's directory
# (dtk_-prefixed successor of r8_require). Accepts a list of paths or a
# single array of paths.
def dtk_require(*files_x)
  first_arg = files_x.first
  paths = first_arg.kind_of?(Array) ? first_arg : files_x
  # directory of the invoking file, derived from the call stack
  invoking_dir = caller.first.gsub(/\/[^\/]+$/,"")
  paths.each do |p|
    require File.expand_path(p, invoking_dir)
  end
end
-
1
# Requires files from subdirectory dir, relative to the calling file's
# directory (dtk_-prefixed successor of r8_nested_require).
def dtk_nested_require(dir,*files_x)
  first_arg = files_x.first
  names = first_arg.kind_of?(Array) ? first_arg : files_x
  invoking_dir = caller.first.gsub(/\/[^\/]+$/,"")
  names.each do |name|
    require File.expand_path("#{dir}/#{name}", invoking_dir)
  end
end
-
-
1
# Like dtk_nested_require, but with an explicitly supplied base directory.
def dtk_nested_require_with_caller_dir(caller_dir,dir,*files_x)
  first_arg = files_x.first
  names = first_arg.kind_of?(Array) ? first_arg : files_x
  names.each do |name|
    require File.expand_path("#{dir}/#{name}", caller_dir)
  end
end
-
1
# Loads each named library from the dtk-common checkout (or gem);
# dtk_-prefixed successor of r8_require_common_lib.
def dtk_require_common_lib(*files_x)
  files_x.each do |lib_name|
    dtk_require_dtk_common_file(lib_name)
  end
end
-
-
# Method will check if there is a locally available dtk-common library
-
1
# Loads dtk_common: from a local checkout when one exists, otherwise from
# the installed gem.
def dtk_require_common_library()
  common_folder = determine_common_folder()
  # fix: was an `unless ... else` — inverted to the idiomatic `if ... else`
  if common_folder
    dtk_require_dtk_common_file('dtk_common')
  else
    # no local checkout; fall back to the installed gem
    require 'dtk_common'
  end
end
-
# determining if dtk-common is locally available
-
-
-
-
1
private

# Candidate directory names for a local dtk-common checkout (looked up two
# levels above this file). fix: frozen to prevent accidental mutation.
POSSIBLE_COMMON_CORE_FOLDERS = ['dtk-common','common','dtk_common'].freeze
-
-
1
# Loads common_library from a local dtk-common checkout when present,
# otherwise relies on the installed dtk-common gem; raises when neither
# is available.
def dtk_require_dtk_common_file(common_library)
  # use common folder else common gem
  folder = determine_common_folder()
  if folder
    dtk_require("../../" + folder + "/lib/#{common_library}")
    return
  end
  # gem is already on the load path, so nothing further to do
  return if is_dtk_common_gem_installed?
  raise DTK::Client::DtkError,"Common directory/gem not found, please make sure that you have cloned dtk-common folder or installed dtk common gem!"
end
-
-
-
1
# True only when there is no local common folder but the gem is installed.
def gem_only_available?()
  determine_common_folder().nil? && is_dtk_common_gem_installed?
end
-
-
##
-
# Check if dtk-common gem has been installed if so use common gem. If there is no gem
-
# logic from dtk_require_dtk_common will try to find commond folder.
-
# DEVELOPER NOTE: Uninstall dtk-common gem when changing dtk-common to avoid re-building gem.
-
1
##
# Check if dtk-common gem has been installed if so use common gem. If there is no gem
# logic from dtk_require_dtk_common will try to find commond folder.
# DEVELOPER NOTE: Uninstall dtk-common gem when changing dtk-common to avoid re-building gem.
def is_dtk_common_gem_installed?
  # Kernel#gem raises Gem::LoadError when the gem is not installed
  gem 'dtk-common'
  true
rescue Gem::LoadError
  false
end
-
-
##
-
# Checks for expected names of dtk-common folder and returns name of existing common folder
-
1
##
# Checks for expected names of dtk-common folder and returns the name of the
# first existing one, or nil when none is present.
def determine_common_folder
  base_dir = File.dirname(__FILE__)
  POSSIBLE_COMMON_CORE_FOLDERS.find do |folder|
    File.directory?(File.join(base_dir, '..', '..', folder))
  end
end
-
######
-
-
# TODO: deprecate or make this applicable to 1.9.3
-
##### for upgrading to ruby 1.9.2
-
1
# Back-compat shims from the ruby 1.9.2 upgrade: select192/find192 provide
# 1.9.2-style return values regardless of the running ruby version.
class Hash
  if RUBY_VERSION == "1.9.2"
    def select192(&block)
      select(&block)
    end
    def find192(&block)
      find(&block)
    end
  else
    # On 1.8, Hash#select returned an array of [k, v] pairs; rebuild a Hash
    # to match 1.9.2's behavior (a no-op on 1.9.3+ where select is a Hash).
    def select192(&block)
      select(&block).inject({}){|h,kv|h.merge(kv[0] => kv[1])}
    end
    # fix: previously ran inject over the found [key, value] pair itself,
    # producing garbage (element-wise merges) and raising NoMethodError on
    # nil when nothing matched. Return the pair, matching the 1.9.2 branch.
    def find192(&block)
      find(&block)
    end
  end
end
-
-
# Adding active support
-
1
require 'active_support/core_ext/hash'
-